signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LU { /** * Returns the pivot sign . */ private static int pivsign ( int [ ] piv , int n ) { } }
int pivsign = 1 ; for ( int i = 0 ; i < n ; i ++ ) { if ( piv [ i ] != ( i + 1 ) ) pivsign = - pivsign ; } return pivsign ;
public class Path {
    /**
     * Moves this cursor to the next key in the tree (in-order successor).
     * Updates the internal node/index stack via push/pop/setIndex; when the
     * last key of the root has been visited, the index is parked one past the
     * root's key end to mark exhaustion.
     */
    void successor() {
        Object[] node = currentNode();
        int i = currentIndex();
        if (!isLeaf(node)) {
            // if we're on a key in a branch, we MUST have a descendant either side of us,
            // so we always go down the left-most child until we hit a leaf
            node = (Object[]) node[getBranchKeyEnd(node) + i + 1];
            while (!isLeaf(node)) {
                push(node, -1);
                node = (Object[]) node[getBranchKeyEnd(node)];
            }
            push(node, 0);
            return;
        }
        // if we haven't reached the end of this leaf, just increment our index and return
        i += 1;
        if (i < getLeafKeyEnd(node)) {
            // moved to the next key in the same leaf
            setIndex(i);
            return;
        }
        // we've reached the end of this leaf,
        // so go up until we reach something we've not finished visiting
        while (!isRoot()) {
            pop();
            i = currentIndex() + 1;
            node = currentNode();
            if (i < getKeyEnd(node)) {
                setIndex(i);
                return;
            }
        }
        // we've visited the last key in the root node, so we're done
        setIndex(getKeyEnd(node));
    }
}
public class HBaseDataHandler { /** * ( non - Javadoc ) * @ see * com . impetus . client . hbase . admin . DataHandler # writeJoinTableData ( java . lang * . String , java . lang . String , java . util . Map ) */ @ Override public void writeJoinTableData ( String tableName , Object rowId , Map < String , Object > columns , String columnFamilyName ) throws IOException { } }
HTableInterface hTable = gethTable ( tableName ) ; hbaseWriter . writeColumns ( hTable , rowId , columns , columnFamilyName ) ; puthTable ( hTable ) ;
public class AmqpClient { /** * Sends a StartOkConnection to server . * @ param clientProperties * @ param mechanism * @ param response * @ param locale * @ return AmqpClient */ AmqpClient startOkConnection ( AmqpArguments clientProperties , String mechanism , String response , String locale ) { } }
this . startOkConnection ( clientProperties , mechanism , response , locale , null , null ) ; return this ;
public class ClientEventHandler { /** * Invoke the execute method * @ param method the execute method * @ param listener the listener * @ param userAgent the user agent object */ protected void invokeExecuteMethod ( Method method , Object listener , Object userAgent ) { } }
ReflectMethodUtil . invokeExecuteMethod ( method , listener , context , userAgent ) ;
public class Vector3f { /** * Set the x , y and z components to match the supplied vector . * Note that due to the given vector < code > v < / code > storing the components in double - precision , * there is the possibility to lose precision . * @ param v * contains the values of x , y and z to set * @ return this */ public Vector3f set ( Vector3dc v ) { } }
return set ( ( float ) v . x ( ) , ( float ) v . y ( ) , ( float ) v . z ( ) ) ;
public class MtasSolrComponentVersion { /** * ( non - Javadoc ) * @ see * mtas . solr . handler . component . util . MtasSolrComponent # create ( mtas . codec . util . * CodecComponent . BasicComponent , java . lang . Boolean ) */ @ Override public SimpleOrderedMap < Object > create ( ComponentVersion version , Boolean encode ) throws IOException { } }
return getVersion ( ) ;
public class CmsReplaceDialog {
    /**
     * Starts the upload progress bar.<p>
     *
     * Builds a progress widget for the single selected file, installs it as
     * the dialog's content, and schedules a repeating timer that polls the
     * upload progress once per second.
     */
    private void showProgress() {
        // only the first selected file is replaced by this dialog
        CmsFileInfo fileInfo = m_fileInput.getFiles()[0];
        m_progressInfo = new CmsUploadProgressInfo(Collections.singletonList(fileInfo.getFileName()));
        m_progressInfo.setContentLength(fileInfo.getFileSize());
        m_mainPanel.setContainerWidget(m_progressInfo);
        // poll the server-side upload state every second
        m_updateProgressTimer = new Timer() {
            @Override
            public void run() {
                updateProgress();
            }
        };
        m_updateProgressTimer.scheduleRepeating(1000);
    }
}
public class LinuxTaskController {
    /**
     * Writes the given command line to the command file inside
     * {@code directory} and marks the file world-readable and
     * world-executable so it can be run by the task launcher.
     *
     * NOTE(review): an IOException during writing is logged and swallowed
     * even though the method declares {@code throws IOException} — callers
     * never observe a failed write and the (possibly empty) file still gets
     * execute permissions; confirm this is intended.
     *
     * @param cmdLine   the command text to write
     * @param directory directory in which the command file is created
     * @throws IOException declared but currently never thrown (see note)
     */
    private void writeCommand(String cmdLine, String directory) throws IOException {
        PrintWriter pw = null;
        String commandFile = directory + File.separator + COMMAND_FILE;
        LOG.info("Writing commands to " + commandFile);
        try {
            FileWriter fw = new FileWriter(commandFile);
            BufferedWriter bw = new BufferedWriter(fw);
            pw = new PrintWriter(bw);
            pw.write(cmdLine);
        } catch (IOException ioe) {
            LOG.error("Caught IOException while writing JVM command line to file. " + ioe.getMessage());
        } finally {
            // closing the PrintWriter flushes and closes the whole chain
            if (pw != null) {
                pw.close();
            }
            // set execute permissions for all on the file.
            File f = new File(commandFile);
            if (f.exists()) {
                f.setReadable(true, false);
                f.setExecutable(true, false);
            }
        }
    }
}
public class DefaultTransformer { /** * { @ inheritDoc } */ @ Override protected Iterable < Target > output ( ) { } }
if ( currentInput == null && currentOutput == null ) return null ; // In case a transformation is already in progress , return the existing output iterable if ( currentOutput != null ) return currentOutput ; // No transformation is in progress - > create new output and forget the input currentOutput = new DefaultTransformerIterable < Target > ( currentInput . iterator ( ) ) ; currentInput = null ; return currentOutput ;
public class ReflectionUtil { /** * For a field of type List < Enumeration < Foo > > , returns Foo */ public static Class < ? > getGenericCollectionTypeOfFieldWithSecondOrderForList ( Field next ) { } }
if ( ! List . class . isAssignableFrom ( next . getType ( ) ) ) { return getGenericCollectionTypeOfField ( next ) ; } Class < ? > type ; ParameterizedType collectionType = ( ParameterizedType ) next . getGenericType ( ) ; Type firstArg = collectionType . getActualTypeArguments ( ) [ 0 ] ; if ( ParameterizedType . class . isAssignableFrom ( firstArg . getClass ( ) ) ) { ParameterizedType pt = ( ( ParameterizedType ) firstArg ) ; Type pt2 = pt . getActualTypeArguments ( ) [ 0 ] ; return ( Class < ? > ) pt2 ; } type = ( Class < ? > ) firstArg ; return type ;
public class CompositeResultHandler { /** * Processes the specified result using all delegate result handlers . * @ param result Validation result to be handled . * @ see ResultHandler # handleResult ( Object ) */ @ Override public void handleResult ( RHI result ) { } }
for ( ResultHandler < RHI > resultHandler : resultHandlers ) { resultHandler . handleResult ( result ) ; }
public class IteratorHelper { /** * Retrieve the size of the passed { @ link Enumeration } . * @ param aEnumeration * Enumeration to check . May be < code > null < / code > . * @ return The number objects or 0 if the passed parameter is * < code > null < / code > . */ @ Nonnegative public static int getSize ( @ Nullable final Enumeration < ? > aEnumeration ) { } }
int ret = 0 ; if ( aEnumeration != null ) while ( aEnumeration . hasMoreElements ( ) ) { aEnumeration . nextElement ( ) ; ++ ret ; } return ret ;
public class ServletHttpResponse { public void setDateHeader ( String name , long value ) { } }
try { _httpResponse . setDateField ( name , value ) ; } catch ( IllegalStateException e ) { LogSupport . ignore ( log , e ) ; }
public class Symbol {
    /**
     * This method looks in the supertypes graph that has the current class as
     * the initial node, till it finds the current symbol or another symbol
     * that hides it. If the current class has more than one supertype (extends
     * one class and implements one or more interfaces) then null can be
     * returned, meaning that a wrong path in the supertypes graph was
     * selected. Null can only be returned as a temporary value, as a result of
     * the recursive call.
     */
    private Symbol hiddenInInternal(ClassSymbol currentClass, Types types) {
        if (currentClass == owner) {
            // reached the declaring class: nothing hid this symbol on the path
            return this;
        }
        // scan same-named members of the current class for a hiding symbol
        Scope.Entry e = currentClass.members().lookup(name);
        while (e.scope != null) {
            // a method only hides when it is static and has a compatible signature
            if (e.sym.kind == kind
                    && (kind != MTH
                        || (e.sym.flags() & STATIC) != 0 && types.isSubSignature(e.sym.type, type))) {
                return e.sym;
            }
            e = e.next();
        }
        // recurse into the superclass and all implemented interfaces
        Symbol hiddenSym = null;
        for (Type st : types.interfaces(currentClass.type).prepend(types.supertype(currentClass.type))) {
            if (st != null && (st.hasTag(CLASS))) {
                Symbol sym = hiddenInInternal((ClassSymbol) st.tsym, types);
                if (sym == this) {
                    // found the symbol itself on this path: not hidden here
                    return this;
                } else if (sym != null) {
                    // remember a hiding symbol, but keep looking for 'this'
                    hiddenSym = sym;
                }
            }
        }
        return hiddenSym;
    }
}
public class PluginMessageDescription { /** * Create a description for an EventCondition object . * @ param condition the condition * @ return a description to be used on email templates */ public String events ( EventCondition condition ) { } }
String description = "event on: " + condition . getDataId ( ) ; if ( condition . getExpression ( ) != null ) { description += " [" + condition . getExpression ( ) + "]" ; } return description ;
public class MarkSet { /** * Marks { @ code type } , throwing if it is explicitly excluded , or if any of its members are also * specifically included . */ void root ( ProtoType type ) { } }
if ( type == null ) throw new NullPointerException ( "type == null" ) ; checkArgument ( ! identifierSet . excludes ( type ) ) ; checkArgument ( ! members . containsKey ( type ) ) ; types . add ( type ) ;
public class DynaFormRow { /** * Adds nested model with given colspan and rowspan . * @ param model * @ param colspan * @ param rowspan * @ return DynaFormModelElement added model */ public DynaFormModelElement addModel ( final DynaFormModel model , final int colspan , final int rowspan ) { } }
final DynaFormModelElement nestedModel = new DynaFormModelElement ( model , colspan , rowspan , row , elements . size ( ) + 1 , dynaFormModel . getControls ( ) . size ( ) + 1 , extended ) ; elements . add ( nestedModel ) ; dynaFormModel . getControls ( ) . addAll ( model . getControls ( ) ) ; totalColspan = totalColspan + colspan ; return nestedModel ;
public class PageWrapper { /** * Creates a new instance of the PageWrapper class for a new Page . * @ param page Page to wrap . * @ param parent Page ' s Parent . * @ param pointer Page Pointer . */ static PageWrapper wrapNew ( BTreePage page , PageWrapper parent , PagePointer pointer ) { } }
return new PageWrapper ( page , parent , pointer , true ) ;
public class HdfsFileVec { /** * This name is used by the DVecs to load data on - demand . */ public static Key make ( FileStatus f ) { } }
Futures fs = new Futures ( ) ; Key key = make ( f , fs ) ; fs . blockForPending ( ) ; return key ;
public class MOEADD {
    /**
     * Updates the parent population with the offspring {@code indiv} using
     * the ENLU method instead of a full fast non-dominated sorting pass.
     * Maintains the rank bookkeeping ({@code rankIdx}), the subregion
     * membership ({@code subregionIdx}), and the population itself.
     */
    public void updateArchive(S indiv) {
        // find the location (subregion) of 'indiv'
        setLocation(indiv, idealPoint.getValues(), nadirPoint.getValues());
        int location = (int) indiv.getAttribute("region");
        // incrementally insert 'indiv' into the non-domination level structure
        numRanks = nondominated_sorting_add(indiv);
        if (numRanks == 1) {
            deleteRankOne(indiv, location);
        } else {
            // collect the members of the last (worst) non-domination level
            ArrayList<S> lastFront = new ArrayList<>(populationSize);
            int frontSize = countRankOnes(numRanks - 1);
            if (frontSize == 0) { // the last non-domination level only contains 'indiv'
                frontSize++;
                lastFront.add(indiv);
            } else {
                for (int i = 0; i < populationSize; i++) {
                    if (rankIdx[numRanks - 1][i] == 1) {
                        lastFront.add((S) population.get(i));
                    }
                }
                if (((int) indiv.getAttribute(ranking.getAttributeIdentifier())) == (numRanks - 1)) {
                    frontSize++;
                    lastFront.add(indiv);
                }
            }
            if (frontSize == 1 && lastFront.get(0).equals(indiv)) {
                // the last non-domination level only has 'indiv'
                int curNC = countOnes(location);
                if (curNC > 0) {
                    // if the subregion of 'indiv' has other solutions, drop 'indiv'
                    nondominated_sorting_delete(indiv);
                } else {
                    // if the subregion of 'indiv' has no solution, keep 'indiv'
                    deleteCrowdRegion1(indiv, location);
                }
            } else if (frontSize == 1 && !lastFront.get(0).equals(indiv)) {
                // the last non-domination level only has one solution, but not 'indiv'
                int targetIdx = findPosition(lastFront.get(0));
                int parentLocation = findRegion(targetIdx);
                int curNC = countOnes(parentLocation);
                if (parentLocation == location) {
                    curNC++;
                }
                if (curNC == 1) {
                    // the subregion only has the solution 'targetIdx'; keep it
                    deleteCrowdRegion2(indiv, location);
                } else {
                    // the subregion contains other solutions; drop 'targetIdx'
                    int indivRank = (int) indiv.getAttribute(ranking.getAttributeIdentifier());
                    int targetRank = (int) population.get(targetIdx).getAttribute(ranking.getAttributeIdentifier());
                    rankIdx[targetRank][targetIdx] = 0;
                    rankIdx[indivRank][targetIdx] = 1;
                    S targetSol = population.get(targetIdx);
                    replace(targetIdx, indiv);
                    subregionIdx[parentLocation][targetIdx] = 0;
                    subregionIdx[location][targetIdx] = 1;
                    // update the non-domination level structure
                    nondominated_sorting_delete(targetSol);
                }
            } else {
                double indivFitness = fitnessFunction(indiv, lambda[location]);
                // find the index of each last-front solution and its corresponding subregion
                int[] idxArray = new int[frontSize];
                int[] regionArray = new int[frontSize];
                for (int i = 0; i < frontSize; i++) {
                    idxArray[i] = findPosition(lastFront.get(i));
                    if (idxArray[i] == -1) {
                        // -1 means the front member is 'indiv' itself
                        regionArray[i] = location;
                    } else {
                        regionArray[i] = findRegion(idxArray[i]);
                    }
                }
                // find the most crowded subregion; ties are kept in 'crowdList'
                ArrayList<Integer> crowdList = new ArrayList<>();
                int crowdIdx;
                int nicheCount = countOnes(regionArray[0]);
                if (regionArray[0] == location) {
                    nicheCount++;
                }
                crowdList.add(regionArray[0]);
                for (int i = 1; i < frontSize; i++) {
                    int curSize = countOnes(regionArray[i]);
                    if (regionArray[i] == location) {
                        curSize++;
                    }
                    if (curSize > nicheCount) {
                        crowdList.clear();
                        nicheCount = curSize;
                        crowdList.add(regionArray[i]);
                    } else if (curSize == nicheCount) {
                        crowdList.add(regionArray[i]);
                    }
                }
                // break ties by aggregated fitness to pick the most crowded subregion
                if (crowdList.size() == 1) {
                    crowdIdx = crowdList.get(0);
                } else {
                    int listLength = crowdList.size();
                    crowdIdx = crowdList.get(0);
                    double sumFitness = sumFitness(crowdIdx);
                    if (crowdIdx == location) {
                        sumFitness = sumFitness + indivFitness;
                    }
                    for (int i = 1; i < listLength; i++) {
                        int curIdx = crowdList.get(i);
                        double curFitness = sumFitness(curIdx);
                        if (curIdx == location) {
                            curFitness = curFitness + indivFitness;
                        }
                        if (curFitness > sumFitness) {
                            crowdIdx = curIdx;
                            sumFitness = curFitness;
                        }
                    }
                }
                switch (nicheCount) {
                    case 0:
                        System.out.println("Impossible empty subregion!!!");
                        break;
                    case 1:
                        // every last-front subregion holds a single solution; keep them all
                        deleteCrowdRegion2(indiv, location);
                        break;
                    default:
                        // delete the worst solution from the most crowded last-front subregion
                        ArrayList<Integer> list = new ArrayList<>();
                        for (int i = 0; i < frontSize; i++) {
                            if (regionArray[i] == crowdIdx) {
                                list.add(i);
                            }
                        }
                        if (list.isEmpty()) {
                            System.out.println("Cannot happen!!!");
                        } else {
                            double maxFitness, curFitness;
                            int targetIdx = list.get(0);
                            if (idxArray[targetIdx] == -1) {
                                maxFitness = indivFitness;
                            } else {
                                maxFitness = fitnessFunction(population.get(idxArray[targetIdx]), lambda[crowdIdx]);
                            }
                            // pick the front member with the worst (largest) fitness
                            for (int i = 1; i < list.size(); i++) {
                                int curIdx = list.get(i);
                                if (idxArray[curIdx] == -1) {
                                    curFitness = indivFitness;
                                } else {
                                    curFitness = fitnessFunction(population.get(idxArray[curIdx]), lambda[crowdIdx]);
                                }
                                if (curFitness > maxFitness) {
                                    targetIdx = curIdx;
                                    maxFitness = curFitness;
                                }
                            }
                            if (idxArray[targetIdx] == -1) {
                                // 'indiv' itself is the worst: discard it
                                nondominated_sorting_delete(indiv);
                            } else {
                                // replace the worst member with 'indiv' and fix bookkeeping
                                int indivRank = (int) indiv.getAttribute(ranking.getAttributeIdentifier());
                                int targetRank = (int) population.get(idxArray[targetIdx]).getAttribute(ranking.getAttributeIdentifier());
                                rankIdx[targetRank][idxArray[targetIdx]] = 0;
                                rankIdx[indivRank][idxArray[targetIdx]] = 1;
                                S targetSol = population.get(idxArray[targetIdx]);
                                replace(idxArray[targetIdx], indiv);
                                subregionIdx[crowdIdx][idxArray[targetIdx]] = 0;
                                subregionIdx[location][idxArray[targetIdx]] = 1;
                                // update the non-domination level structure
                                nondominated_sorting_delete(targetSol);
                            }
                        }
                        break;
                }
            }
        }
    }
}
public class CsvReader { /** * Specifies the types for the CSV fields . This method parses the CSV data to a 2 - tuple * which has fields of the specified types . * This method is overloaded for each possible length of the tuples to support type safe * creation of data sets through CSV parsing . * @ param type0 The type of CSV field 0 and the type of field 0 in the returned tuple type . * @ param type1 The type of CSV field 1 and the type of field 1 in the returned tuple type . * @ return The { @ link eu . stratosphere . api . java . DataSet } representing the parsed CSV data . */ public < T0 , T1 > DataSource < Tuple2 < T0 , T1 > > types ( Class < T0 > type0 , Class < T1 > type1 ) { } }
TupleTypeInfo < Tuple2 < T0 , T1 > > types = TupleTypeInfo . getBasicTupleTypeInfo ( type0 , type1 ) ; CsvInputFormat < Tuple2 < T0 , T1 > > inputFormat = new CsvInputFormat < Tuple2 < T0 , T1 > > ( path ) ; configureInputFormat ( inputFormat , type0 , type1 ) ; return new DataSource < Tuple2 < T0 , T1 > > ( executionContext , inputFormat , types ) ;
public class ElementWithOptions { /** * Indicates if the element has an enabled value in any of its groups , that must also be enabled . * @ param value Value * @ return True if the value exists and is enabled , false otherwise */ public boolean hasValueEnabled ( String value ) { } }
for ( OptionGroup group : optionGroups . values ( ) ) { if ( group . hasValueEnabled ( value ) ) { return true ; } } return false ;
public class BosClient { /** * Gets the object content stored in Bos under the specified bucket and key . * @ param request The request object containing all the options on how to download the Bos object content . * @ return The object content stored in Bos in the specified bucket and key . */ public byte [ ] getObjectContent ( GetObjectRequest request ) { } }
BosObjectInputStream content = this . getObject ( request ) . getObjectContent ( ) ; try { return IOUtils . toByteArray ( content ) ; } catch ( IOException e ) { try { content . close ( ) ; } catch ( IOException e1 ) { // ignore , throw e not e1. } throw new BceClientException ( "Fail read object content" , e ) ; } finally { try { content . close ( ) ; } catch ( IOException e ) { // ignore } }
public class YearWeek { /** * from IsoFields in ThreeTen - Backport */ private static int weekRange ( int weekBasedYear ) { } }
LocalDate date = LocalDate . of ( weekBasedYear , 1 , 1 ) ; // 53 weeks if year starts on Thursday , or Wed in a leap year if ( date . getDayOfWeek ( ) == THURSDAY || ( date . getDayOfWeek ( ) == WEDNESDAY && date . isLeapYear ( ) ) ) { return 53 ; } return 52 ;
public class RxPresenter { /** * Returns a method that can be used for manual restartable chain build . It returns an Action1 that splits * a received { @ link Delivery } into two { @ link Action2 } onNext and onError calls . * @ param onNext a method that will be called if the delivery contains an emitted onNext value . * @ param onError a method that will be called if the delivery contains an onError throwable . * @ param < T > a type on onNext value . * @ return an Action1 that splits a received { @ link Delivery } into two { @ link Action2 } onNext and onError calls . */ public < T > Action1 < Delivery < View , T > > split ( final Action2 < View , T > onNext , @ Nullable final Action2 < View , Throwable > onError ) { } }
return new Action1 < Delivery < View , T > > ( ) { @ Override public void call ( Delivery < View , T > delivery ) { delivery . split ( onNext , onError ) ; } } ;
public class CoverageUtil { /** * TODO : Comment */ public LexNameList getSpanNames ( File filename ) { } }
LexNameList list = new LexNameList ( ) ; for ( LexNameToken name : nameSpans . keySet ( ) ) { ILexLocation span = nameSpans . get ( name ) ; if ( span . getFile ( ) . equals ( filename ) ) { list . add ( name ) ; } } return list ;
public class ClassDef { /** * Write this class file to the specified output stream . */ public void write ( Output dest ) throws IOException { } }
int i , max ; dest . writeU2 ( Access . toFlags ( accessFlags ) ) ; dest . writeClassRef ( thisClass ) ; if ( superClass != null ) { dest . writeClassRef ( superClass ) ; } else { dest . writeU2 ( 0 ) ; } max = interfaces . size ( ) ; dest . writeU2 ( max ) ; for ( i = 0 ; i < max ; i ++ ) { dest . writeClassRef ( interfaces . get ( i ) ) ; } max = fields . size ( ) ; dest . writeU2 ( max ) ; for ( i = 0 ; i < max ; i ++ ) { fields . get ( i ) . write ( dest ) ; } max = methods . size ( ) ; dest . writeU2 ( max ) ; for ( i = 0 ; i < max ; i ++ ) { methods . get ( i ) . write ( dest ) ; } max = attributes . size ( ) ; dest . writeU2 ( max ) ; for ( i = 0 ; i < max ; i ++ ) { attributes . get ( i ) . write ( dest ) ; }
public class ColumnListEditor { /** * Set the column that cannot be removed from the list , as it ' s used by the filter . * @ param value The column id . */ @ Override public void onValueRestricted ( final String value ) { } }
this . restrictedColumns . add ( value ) ; setEditorEnabled ( value , false , DataSetEditorConstants . INSTANCE . columnIsUsedInFilter ( ) ) ;
public class AbstractProgramTwillRunnable {
    /**
     * Builds the Guice module for this runnable by combining the standard
     * distributed runtime modules with local bindings for queue reading,
     * program loading, data-fabric transactions, and service announcement
     * (delegated to the Twill context).
     *
     * TODO(terence): make this work for different modes.
     */
    protected Module createModule(final TwillContext context) {
        return Modules.combine(
            new ConfigModule(cConf, hConf),
            new IOModule(),
            new ZKClientModule(),
            new MetricsClientRuntimeModule().getDistributedModules(),
            new LocationRuntimeModule().getDistributedModules(),
            new DiscoveryRuntimeModule().getDistributedModules(),
            new DataFabricModules().getDistributedModules(),
            new AbstractModule() {
                @Override
                protected void configure() {
                    // For binding queue stuff
                    bind(QueueReaderFactory.class).in(Scopes.SINGLETON);
                    // For program loading
                    install(createProgramFactoryModule());
                    // For binding DataSet transaction stuff
                    install(new DataFabricFacadeModule());
                    // Service announcements are forwarded to the Twill context
                    bind(ServiceAnnouncer.class).toInstance(new ServiceAnnouncer() {
                        @Override
                        public Cancellable announce(String serviceName, int port) {
                            return context.announce(serviceName, port);
                        }
                    });
                }
            });
    }
}
public class ServiceLoaderProcessor { /** * { @ inheritDoc } */ public void deploy ( final DeploymentPhaseContext phaseContext ) throws DeploymentUnitProcessingException { } }
final Map < String , List < String > > foundServices = new HashMap < String , List < String > > ( ) ; final DeploymentUnit deploymentUnit = phaseContext . getDeploymentUnit ( ) ; final ResourceRoot deploymentRoot = deploymentUnit . getAttachment ( Attachments . DEPLOYMENT_ROOT ) ; if ( deploymentRoot != null ) { processRoot ( deploymentRoot , foundServices ) ; } final List < ResourceRoot > resourceRoots = deploymentUnit . getAttachmentList ( Attachments . RESOURCE_ROOTS ) ; for ( ResourceRoot resourceRoot : resourceRoots ) { if ( ! SubDeploymentMarker . isSubDeployment ( resourceRoot ) && ModuleRootMarker . isModuleRoot ( resourceRoot ) ) processRoot ( resourceRoot , foundServices ) ; } deploymentUnit . putAttachment ( Attachments . SERVICES , new ServicesAttachment ( foundServices ) ) ;
public class IndexInfos { /** * Removes the name from the index infos . * @ param name the name to remove . */ public void removeName ( String name ) { } }
indexes . remove ( name ) ; names . remove ( name ) ; dirty = true ;
public class SVGPath { /** * Cubic Bezier line to the given relative coordinates . * @ param c1xy first control point * @ param c2xy second control point * @ param xy new coordinates * @ return path object , for compact syntax . */ public SVGPath relativeCubicTo ( double [ ] c1xy , double [ ] c2xy , double [ ] xy ) { } }
return append ( PATH_CUBIC_TO_RELATIVE ) . append ( c1xy [ 0 ] ) . append ( c1xy [ 1 ] ) . append ( c2xy [ 0 ] ) . append ( c2xy [ 1 ] ) . append ( xy [ 0 ] ) . append ( xy [ 1 ] ) ;
public class CPOptionPersistenceImpl { /** * Returns a range of all the cp options . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPOptionModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of cp options * @ param end the upper bound of the range of cp options ( not inclusive ) * @ return the range of cp options */ @ Override public List < CPOption > findAll ( int start , int end ) { } }
return findAll ( start , end , null ) ;
public class GeoJSONParser { /** * Parses a GeoJSON document into an array of { @ link JTSFeature } * @ param geoJSON * The GeoJSON content * @ return An array of { @ link JTSFeature } * @ throws JSONException */ public ArrayList < JTSFeature > parse ( String geoJSON ) throws JSONException { } }
ArrayList < JTSFeature > features = new ArrayList < JTSFeature > ( ) ; JSONObject json = new JSONObject ( geoJSON ) ; JSONArray feats = json . getJSONArray ( "features" ) ; JSONObject feature ; JTSFeature feat ; final int size = feats . length ( ) ; for ( int i = 0 ; i < size ; i ++ ) { feature = feats . getJSONObject ( i ) ; feat = parseFeature ( feature ) ; features . add ( feat ) ; } return features ;
public class SubPathTree { /** * Expose a resource with a path that maps to external path * @ param resource * @ return */ private Resource < T > translateResourceExternal ( Resource < T > resource ) { } }
if ( fullPath ) { return resource ; } return new translatedResource < T > ( resource , translatePathExternal ( resource . getPath ( ) ) ) ;
public class DeleteResponseUnmarshaller { /** * { @ inheritDoc } */ @ Override public Object unMarshall ( Response < DeleteResponse > response , Object entity ) { } }
return unMarshall ( response , entity . getClass ( ) ) ;
public class AddOn { /** * Tells whether or not the given { @ code extension } has a ( direct ) dependency on the given { @ code addOn } ( including * version ) . * @ param extension the extension that will be checked * @ param addOn the add - on that will be checked in the dependencies on the extension * @ return { @ code true } if the extension depends on the given add - on , { @ code false } otherwise . * @ since 2.4.0 */ public boolean dependsOn ( Extension extension , AddOn addOn ) { } }
String classname = extension . getClass ( ) . getCanonicalName ( ) ; for ( ExtensionWithDeps extensionWithDeps : extensionsWithDeps ) { if ( extensionWithDeps . getClassname ( ) . equals ( classname ) ) { return dependsOn ( extensionWithDeps . getDependencies ( ) , addOn ) ; } } return false ;
public class Snapshot { /** * / * ( non - Javadoc ) * @ see java . util . Comparator # compare ( java . lang . Object , java . lang . Object ) */ @ Override public int compare ( Snapshot o1 , Snapshot o2 ) { } }
return o1 . name . compareTo ( o2 . name ) ;
public class ClientService { /** * Get and refresh the details of an existing PAYMILL { @ link Client } . * @ param client * A { @ link Client } with Id . * @ return Refreshed instance of the given { @ link Client } . */ public Client get ( Client client ) { } }
return RestfulUtils . show ( ClientService . PATH , client , Client . class , super . httpClient ) ;
public class AbstractTreeWriter { /** * Add each level of the class tree . For each sub - class or * sub - interface indents the next level information . * Recurses itself to add subclasses info . * @ param parent the superclass or superinterface of the list * @ param list list of the sub - classes at this level * @ param isEnum true if we are generating a tree for enums * @ param contentTree the content tree to which the level information will be added */ protected void addLevelInfo ( ClassDoc parent , List < ClassDoc > list , boolean isEnum , Content contentTree ) { } }
int size = list . size ( ) ; if ( size > 0 ) { Content ul = new HtmlTree ( HtmlTag . UL ) ; for ( int i = 0 ; i < size ; i ++ ) { ClassDoc local = list . get ( i ) ; HtmlTree li = new HtmlTree ( HtmlTag . LI ) ; li . addAttr ( HtmlAttr . TYPE , LI_CIRCLE ) ; addPartialInfo ( local , li ) ; addExtendsImplements ( parent , local , li ) ; addLevelInfo ( local , classtree . subs ( local , isEnum ) , isEnum , li ) ; // Recurse ul . addContent ( li ) ; } contentTree . addContent ( ul ) ; }
public class DictionaryMaker { /** * 插入条目 , 但是不合并 , 如果已有则忽略 * @ param item */ public void addNotCombine ( Item item ) { } }
Item innerItem = trie . get ( item . key ) ; if ( innerItem == null ) { innerItem = item ; trie . put ( innerItem . key , innerItem ) ; }
public class AbstractRule { /** * Create and return a new Violation for this rule and the specified import className and alias * @ param sourceCode - the SourceCode * @ param className - the class name ( as specified within the import statement ) * @ param alias - the alias for the import statement * @ param violationMessage - the violation message ; may be null * @ return a new Violation object */ protected Violation createViolationForImport ( SourceCode sourceCode , String className , String alias , String violationMessage ) { } }
Map importInfo = ImportUtil . sourceLineAndNumberForImport ( sourceCode , className , alias ) ; Violation violation = new Violation ( ) ; violation . setRule ( this ) ; violation . setSourceLine ( ( String ) importInfo . get ( "sourceLine" ) ) ; violation . setLineNumber ( ( Integer ) importInfo . get ( "lineNumber" ) ) ; violation . setMessage ( violationMessage ) ; return violation ;
public class Element {
    /**
     * Captures an image of the element and returns the html-friendly link of
     * it for use in the logging file. If there is a problem capturing the
     * image, an error message is returned instead.
     *
     * @return String the location of the screenshot
     */
    private String getScreenshot() {
        WebElement webElement = getWebElement();
        // fallback markup used when capture/crop fails for any reason
        String imageLink = "<b><font class='fail'>No Image Preview</font></b>";
        // capture an image of it
        try {
            imageLink = reporter.captureEntirePageScreenshot();
            // the link is HTML; the file name sits between the first pair of quotes
            File image = new File(reporter.getDirectory(), imageLink.split("\"")[1]);
            BufferedImage fullImg = ImageIO.read(image);
            // Get the location of element on the page
            org.openqa.selenium.Point point = webElement.getLocation();
            // Get width and height of the element
            int eleWidth = webElement.getSize().getWidth();
            int eleHeight = webElement.getSize().getHeight();
            // Crop the entire page screenshot to get only element screenshot,
            // overwriting the full-page image file in place
            BufferedImage eleScreenshot = fullImg.getSubimage(point.getX(), point.getY(), eleWidth, eleHeight);
            ImageIO.write(eleScreenshot, "png", image);
        } catch (WebDriverException | RasterFormatException | IOException e) {
            // on failure the pre-set fallback link is returned
            log.error(e);
        }
        return imageLink;
    }
}
public class JoinedStreams { /** * Specifies a { @ link KeySelector } for elements from the first input with explicit type information for the key type . * @ param keySelector The KeySelector to be used for extracting the first input ' s key for partitioning . * @ param keyType The type information describing the key type . */ public < KEY > Where < KEY > where ( KeySelector < T1 , KEY > keySelector , TypeInformation < KEY > keyType ) { } }
requireNonNull ( keySelector ) ; requireNonNull ( keyType ) ; return new Where < > ( input1 . clean ( keySelector ) , keyType ) ;
public class MtasFieldsConsumer { /** * Gets the prefix stats intersection prefix attribute . * @ param field * the field * @ return the prefix stats intersection prefix attribute */ public String getPrefixStatsIntersectionPrefixAttribute ( String field ) { } }
if ( intersectingPrefixes . containsKey ( field ) ) { return String . join ( MtasToken . DELIMITER , intersectingPrefixes . get ( field ) ) ; } else { return "" ; }
public class AttachmentDeserializer {
    /**
     * Deserializes an Attachment, resolving the concrete Payload subclass from the
     * attachment type and, for templates, from the embedded "template_type" field.
     *
     * @see com.google.gson.JsonDeserializer#deserialize(com.google.gson.JsonElement,
     *      java.lang.reflect.Type, com.google.gson.JsonDeserializationContext)
     */
    public Attachment deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
        // First let a plain Gson instance build the Attachment shell (type + raw fields).
        Attachment attachment = delegateGson.fromJson(json, Attachment.class);
        AttachmentType type = attachment.getType();
        Class<? extends Payload> payloadClass = null;
        JsonElement payloadJson = json.getAsJsonObject().get("payload");
        switch (type) {
            // All media attachments share the same URL-based payload shape.
            case AUDIO:
            case FILE:
            case IMAGE:
            case VIDEO:
                payloadClass = UrlPayload.class;
                break;
            case LOCATION:
                payloadClass = QuickReplyLocationPayload.class;
                break;
            case FALLBACK:
                // In case of Fallback attachment the payload will be null so I do nothing.
                break;
            case TEMPLATE:
                // In case of a template I need to check which one to instantiate.
                String payloadTypeString = payloadJson.getAsJsonObject().get("template_type").getAsString();
                // NOTE(review): toUpperCase() uses the default locale -- presumably the
                // template_type values are ASCII; confirm, else use Locale.ROOT.
                PayloadType templateType = PayloadType.valueOf(payloadTypeString.toUpperCase());
                switch (templateType) {
                    case AIRLINE_BOARDINGPASS:
                        payloadClass = AirlineBoardingPassTemplatePayload.class;
                        break;
                    case AIRLINE_CHECKIN:
                        payloadClass = AirlineCheckinTemplatePayload.class;
                        break;
                    case AIRLINE_ITINERARY:
                        payloadClass = AirlineItineraryTemplatePayload.class;
                        break;
                    case AIRLINE_UPDATE:
                        payloadClass = AirlineFlightUpdateTemplatePayload.class;
                        break;
                    case BUTTON:
                        payloadClass = ButtonTemplatePayload.class;
                        break;
                    case GENERIC:
                        payloadClass = GenericTemplatePayload.class;
                        break;
                    case LIST:
                        payloadClass = ListTemplatePayload.class;
                        break;
                    case RECEIPT:
                        payloadClass = ReceiptTemplatePayload.class;
                        break;
                }
                break;
        }
        // payloadClass may still be null (FALLBACK); context.deserialize then yields null.
        Payload payload = context.deserialize(payloadJson, payloadClass);
        attachment.setPayload(payload);
        return attachment;
    }
}
public class IRBuilderMethods { /** * - - - - - Array Store Statement - - - - - */ public static IRArrayStoreStatementBuilder arrayStore ( IRExpressionBuilder target , IRExpressionBuilder index , IRExpressionBuilder value ) { } }
IRArrayStoreStatementBuilder statement = new IRArrayStoreStatementBuilder ( target , index , value ) ; return statement ;
public class JdbcConnectionDescriptor { /** * Sets the jdbcLevel . parse the string setting and check that it is indeed an integer . * @ param jdbcLevel The jdbcLevel to set */ public void setJdbcLevel ( String jdbcLevel ) { } }
if ( jdbcLevel != null ) { try { double intLevel = Double . parseDouble ( jdbcLevel ) ; setJdbcLevel ( intLevel ) ; } catch ( NumberFormatException nfe ) { setJdbcLevel ( 2.0 ) ; logger . info ( "Specified JDBC level was not numeric (Value=" + jdbcLevel + "), used default jdbc level of 2.0 " ) ; } } else { setJdbcLevel ( 2.0 ) ; logger . info ( "Specified JDBC level was null, used default jdbc level of 2.0 " ) ; }
public class HBaseClientTemplate { /** * Execute an increment on an entity field . This field must be a type that * supports increments . Returns the new increment value of type long . * @ param key * The key to map to an Increment * @ param fieldName * The name of the field we are incrementing * @ param amount * The amount to increment by * @ param entityMapper * The EntityMapper to map the key and increment amount to an * Increment . * @ return The new field amount after the increment . */ public < E > long increment ( PartitionKey key , String fieldName , long amount , EntityMapper < E > entityMapper ) { } }
Increment increment = entityMapper . mapToIncrement ( key , fieldName , amount ) ; HTableInterface table = pool . getTable ( tableName ) ; Result result ; try { result = table . increment ( increment ) ; } catch ( IOException e ) { throw new DatasetIOException ( "Error incrementing field." , e ) ; } return entityMapper . mapFromIncrementResult ( result , fieldName ) ;
public class CachedCounters {
    /**
     * Calculates min/average/max statistics based on the current and previous values.
     *
     * @param name a counter name of Statistics type
     * @param value a value to update statistics
     */
    public void stats(String name, float value) {
        // Fetch (or lazily register) the Statistics-typed counter for this name.
        Counter counter = get(name, CounterType.Statistics);
        // Fold the new sample into the counter's aggregates.
        calculateStats(counter, value);
        // Propagate the updated cached state.
        update();
    }
}
public class DocumentBuilderImpl { /** * Helper method to load XML document from input source . * @ param source input source , * @ param useNamespace flag to control name space awareness . * @ return newly created XML document . */ private static Document loadXML ( InputSource source , boolean useNamespace ) { } }
try { org . w3c . dom . Document doc = getDocumentBuilder ( null , useNamespace ) . parse ( source ) ; return new DocumentImpl ( doc ) ; } catch ( Exception e ) { throw new DomException ( e ) ; } finally { close ( source ) ; }
public class CrystalBuilder {
    /**
     * Calculate interfaces between original asymmetric unit and neighboring
     * whole unit cells, including the original full unit cell i.e. i=0,j=0,k=0.
     *
     * Strategy: bound every chain and AU with boxes, then prune chain-chain
     * contact calculations via (1) AU-box overlap, (2) symmetry-operator
     * redundancy, (3) involutory-operator half-matrix skipping, and
     * (4) chain-box overlap, before computing actual atom contacts.
     *
     * @param set the interface list that found interfaces are added to
     * @param cutoff the distance cutoff for contacts (and box-overlap pruning)
     */
    private void calcInterfacesCrystal(StructureInterfaceList set, double cutoff) {
        // initialising debugging vars
        long start = -1;
        long end = -1;
        int trialCount = 0;
        int skippedRedundant = 0;
        int skippedAUsNoOverlap = 0;
        int skippedChainsNoOverlap = 0;
        int skippedSelfEquivalent = 0;
        // The bounding boxes of all AUs of the unit cell
        UnitCellBoundingBox bbGrid = new UnitCellBoundingBox(numOperatorsSg, numPolyChainsAu);;
        // we calculate all the bounds of each of the asym units, those will then be reused and translated
        bbGrid.setBbs(structure, ops, INCLUDE_HETATOMS);
        // if not crystallographic there's no search to do in other cells, only chains within "AU" will be checked
        if (!searchBeyondAU) numCells = 0;
        boolean verbose = logger.isDebugEnabled();
        if (verbose) {
            trialCount = 0;
            start = System.currentTimeMillis();
            int neighbors = (2 * numCells + 1) * (2 * numCells + 1) * (2 * numCells + 1) - 1;
            int auTrials = (numPolyChainsAu * (numPolyChainsAu - 1)) / 2;
            int trials = numPolyChainsAu * numOperatorsSg * numPolyChainsAu * neighbors;
            logger.debug("Chain clash trials within original AU: " + auTrials);
            logger.debug("Chain clash trials between the original AU and the neighbouring " + neighbors + " whole unit cells (" + numCells + " neighbours)" + "(2x" + numPolyChainsAu + "chains x " + numOperatorsSg + "AUs x " + neighbors + "cells) : " + trials);
            logger.debug("Total trials: " + (auTrials + trials));
        }
        List<Chain> polyChains = structure.getPolyChains();
        // Iterate over the (2*numCells+1)^3 grid of unit-cell translations.
        for (int a = -numCells; a <= numCells; a++) {
            for (int b = -numCells; b <= numCells; b++) {
                for (int c = -numCells; c <= numCells; c++) {
                    Point3i trans = new Point3i(a, b, c);
                    Vector3d transOrth = new Vector3d(a, b, c);
                    if (a != 0 || b != 0 || c != 0) {
                        // we avoid doing the transformation for 0,0,0 (in case it's not crystallographic)
                        this.crystallographicInfo.getCrystalCell().transfToOrthonormal(transOrth);
                    }
                    UnitCellBoundingBox bbGridTrans = bbGrid.getTranslatedBbs(transOrth);
                    for (int n = 0; n < numOperatorsSg; n++) {
                        // short-cut strategies
                        // 1) we skip first of all if the bounding boxes of the AUs don't overlap
                        if (!bbGrid.getAuBoundingBox(0).overlaps(bbGridTrans.getAuBoundingBox(n), cutoff)) {
                            skippedAUsNoOverlap++;
                            continue;
                        }
                        // 2) we check if we didn't already see its equivalent symmetry operator partner
                        CrystalTransform tt = new CrystalTransform(this.crystallographicInfo.getSpaceGroup(), n);
                        tt.translate(trans);
                        if (isRedundantTransform(tt)) {
                            skippedRedundant++;
                            continue;
                        }
                        addVisitedTransform(tt);
                        boolean selfEquivalent = false;
                        // 3) an operator can be "self redundant" if it is the inverse of itself (involutory, e.g. all pure 2-folds with no translation)
                        if (tt.isEquivalent(tt)) {
                            logger.debug("Transform {} is equivalent to itself, will skip half of i-chains to j-chains comparisons", tt.toString());
                            // in this case we can't skip the operator, but we can skip half of the matrix comparisons e.g. j > i
                            // we set a flag and do that within the loop below
                            selfEquivalent = true;
                        }
                        StringBuilder builder = null;
                        if (verbose) builder = new StringBuilder(String.valueOf(tt)).append(" ");
                        // Now that we know that boxes overlap and operator is not redundant, we have to go to the details
                        int contactsFound = 0;
                        for (int j = 0; j < numPolyChainsAu; j++) {
                            for (int i = 0; i < numPolyChainsAu; i++) {
                                // we only have to compare the original asymmetric unit to every full cell around
                                if (selfEquivalent && (j > i)) {
                                    // in case of self equivalency of the operator we can safely skip half of the matrix
                                    skippedSelfEquivalent++;
                                    continue;
                                }
                                // special case of original AU, we don't compare a chain to itself
                                if (n == 0 && a == 0 && b == 0 && c == 0 && i == j) continue;
                                // before calculating the AtomContactSet we check for overlap, then we save putting atoms into the grid
                                if (!bbGrid.getChainBoundingBox(0, i).overlaps(bbGridTrans.getChainBoundingBox(n, j), cutoff)) {
                                    skippedChainsNoOverlap++;
                                    if (verbose) {
                                        builder.append(".");
                                    }
                                    continue;
                                }
                                trialCount++;
                                // finally we've gone through all short-cuts and the 2 chains seem to be close enough:
                                // we do the calculation of contacts
                                Chain chaini = polyChains.get(i);
                                Chain chainj = polyChains.get(j);
                                if (n != 0 || a != 0 || b != 0 || c != 0) {
                                    // Clone chain j and move it by the symmetry operator plus cell translation
                                    // so the original structure is left untouched.
                                    Matrix4d mJCryst = new Matrix4d(ops[n]);
                                    translate(mJCryst, transOrth);
                                    chainj = (Chain) chainj.clone();
                                    Calc.transform(chainj, mJCryst);
                                }
                                StructureInterface interf = calcContacts(chaini, chainj, cutoff, tt, builder);
                                if (interf == null) {
                                    continue;
                                }
                                contactsFound++;
                                if (this.hasNcsOps()) {
                                    StructureInterface interfNcsRef = findNcsRef(interf);
                                    set.addNcsEquivalent(interf, interfNcsRef);
                                } else {
                                    set.add(interf);
                                }
                            }
                        }
                        if (verbose) {
                            // Report "found(max possible)" per operator; the max depends on
                            // whether this is the identity AU or an involutory operator.
                            if (a == 0 && b == 0 && c == 0 && n == 0)
                                builder.append(" " + contactsFound + "(" + (numPolyChainsAu * (numPolyChainsAu - 1)) / 2 + ")");
                            else if (selfEquivalent)
                                builder.append(" " + contactsFound + "(" + (numPolyChainsAu * (numPolyChainsAu + 1)) / 2 + ")");
                            else
                                builder.append(" " + contactsFound + "(" + numPolyChainsAu * numPolyChainsAu + ")");
                            logger.debug(builder.toString());
                        }
                    }
                }
            }
        }
        end = System.currentTimeMillis();
        logger.debug("\n" + trialCount + " chain-chain clash trials done. Time " + (end - start) / 1000 + "s");
        logger.debug(" skipped (not overlapping AUs) : " + skippedAUsNoOverlap);
        logger.debug(" skipped (not overlapping chains) : " + skippedChainsNoOverlap);
        logger.debug(" skipped (sym redundant op pairs) : " + skippedRedundant);
        logger.debug(" skipped (sym redundant self op) : " + skippedSelfEquivalent);
        logger.debug("Found " + set.size() + " interfaces.");
    }
}
public class ArrayUtils { /** * Determines the intersection of the given arrays . < br > * Note : Only use this method when the given arrays are sorted and contain * only distinct values . * @ param arrs * @ return */ public static short [ ] intersectionSorted ( short [ ] ... arrs ) { } }
if ( arrs . length == 0 ) return new short [ 0 ] ; if ( arrs . length == 1 ) return arrs [ 0 ] ; short [ ] [ ] arrList = new short [ arrs . length - 1 ] [ ] ; short [ ] minLengthArray = arrs [ 0 ] ; for ( int i = 1 ; i < arrs . length ; i ++ ) { short [ ] arr = arrs [ i ] ; if ( arr . length < minLengthArray . length ) { arrList [ i - 1 ] = minLengthArray ; minLengthArray = arr ; } else { arrList [ i - 1 ] = arr ; } } // System . out . println ( " - - " ) ; // System . out . println ( Arrays . toString ( minLengthArray ) ) ; // for ( short [ ] otherArr : arrList ) { // System . out . println ( Arrays . toString ( otherArr ) ) ; // System . out . println ( " - - " ) ; short [ ] pointer = ArrayUtils . createArray ( arrs . length - 1 , ( short ) 0 ) ; List < Short > commonIndices = new ArrayList < > ( minLengthArray . length ) ; for ( short i = 0 ; i < minLengthArray . length ; i ++ ) { short stateIndex = minLengthArray [ i ] ; boolean insert = true ; for ( short j = 0 ; j < pointer . length ; j ++ ) { for ( short k = pointer [ j ] ; k < arrList [ j ] . length ; k ++ ) { if ( stateIndex < arrList [ j ] [ k ] ) { // Array does not contain the state - index break ; } else if ( stateIndex > arrList [ j ] [ k ] ) { if ( k < arrList [ j ] . length - 1 ) { pointer [ j ] = ( short ) ( pointer [ j ] + 1 ) ; } else { break ; } } else { break ; } } if ( arrList [ j ] [ pointer [ j ] ] != stateIndex ) { insert = false ; break ; } } if ( insert ) { commonIndices . add ( stateIndex ) ; } } short [ ] result = new short [ commonIndices . size ( ) ] ; for ( int l = 0 ; l < commonIndices . size ( ) ; l ++ ) { result [ l ] = commonIndices . get ( l ) ; } return result ;
public class PropertyExtractor { /** * { @ inheritDoc } */ public T extract ( Object target ) { } }
if ( target == null ) { return null ; } Class targetClass = target . getClass ( ) ; try { if ( propertyAccessor == null || propertyAccessor . getDeclaringClass ( ) != targetClass ) { propertyAccessor = findReadMethod ( propertyName , target . getClass ( ) ) ; } if ( propertyAccessor != null ) { return ( T ) propertyAccessor . invoke ( target ) ; } else { throw new RuntimeException ( "Readable property " + propertyName + " does not exist in the class " + targetClass ) ; } } catch ( Exception e ) { throw new RuntimeException ( e ) ; }
public class RasterableModel {
    /**
     * Rasterable: resolves and caches the sibling features this model renders with.
     *
     * @param provider the feature provider to query
     */
    @Override
    public void prepare(FeatureProvider provider) {
        super.prepare(provider);
        // Cache the required features once so rendering does not re-query the provider.
        transformable = provider.getFeature(Transformable.class);
        mirrorable = provider.getFeature(Mirrorable.class);
        animator = provider.getFeature(Animatable.class);
    }
}
public class PMContext { /** * Creates a new instance wrapper with the instance and the instanceId . */ public EntityInstanceWrapper buildInstanceWrapper ( final Object instance ) throws PMException { } }
final EntityInstanceWrapper wrapper = new EntityInstanceWrapper ( instance ) ; if ( hasEntity ( ) && ! getEntityContainer ( ) . isSelectedNew ( ) ) { wrapper . setInstanceId ( getDataAccess ( ) . getInstanceId ( this , wrapper ) ) ; } return wrapper ;
public class IntArrayList { /** * Removes a single instance of the specified element from this * list , if it is present ( optional operation ) . More formally , * removes an element < tt > e < / tt > such that < tt > ( o = = null ? e = = null : * o . equals ( e ) ) < / tt > , if the list contains one or more such * elements . Returns < tt > true < / tt > if the list contained the * specified element ( or equivalently , if the list changed as a * result of the call ) . < p > * @ param value element to be removed from this list , if present . * @ return < tt > true < / tt > if the list contained the specified element . */ @ Override public boolean removeByValue ( int value ) { } }
for ( int index = 0 ; index < size ; index ++ ) if ( value == elementData [ index ] ) { fastRemove ( index ) ; return true ; } return false ;
public class DatabaseHashMapMR { /** * Initialize SQL Strings */ protected void initializeSQL_Strings ( ) { } }
super . initializeSQL_Strings ( ) ; findProps = "select propid, small, medium, large from " + tableName + " where id = ? and propid <> ? and appname = ?" ; findAllKeys = "select propid from " + tableName + " where id = ? and propid <> id and appname = ?" ; // upSmProp = " update " + tableName + " set small = ? , medium = NULL , large = NULL where id = ? and propid = ? and appname = ? " ; // upMedProp = " update " + tableName + " set small = NULL , medium = ? , large = NULL where id = ? and propid = ? and appname = ? " ; // upLgProp = " update " + tableName + " set small = NULL , medium = NULL , large = ? where id = ? and propid = ? and appname = ? " ; upAnyProp = "update " + tableName + " set small = ?, medium = ?, large = ? where id = ? and propid = ? and appname = ?" ; insAnyProp = "insert into " + tableName + " (id, propid, small, medium, large, appname) values (?, ?, ?, ?, ?, ?)" ; insNoProp = "insert into " + tableName + " (id, propid, appname, listenercnt, lastaccess, creationtime, maxinactivetime, username) values (?, ?, ?, ?, ?, ?, ?, ?)" ; findProps = "select propid, small, medium, large from " + tableName + " where id = ? and propid <> ? and appname = ? " ; // * dbc2.2
public class OfflineChangePointDetectionAlgorithm { /** * Compute the incremental sum of an array , i . e . the sum of all points up to * the given index . * @ param data Input data * @ param out Output array ( must be large enough ) . */ public static void cusum ( double [ ] data , double [ ] out , int begin , int end ) { } }
assert ( out . length >= data . length ) ; // Use Kahan summation for better precision ! // FIXME : this should be unit tested . double m = 0. , carry = 0. ; for ( int i = begin ; i < end ; i ++ ) { double v = data [ i ] - carry ; // Compensation double n = out [ i ] = ( m + v ) ; // May lose small digits of v . carry = ( n - m ) - v ; // Recover lost bits m = n ; }
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the EClass for IfcBuildingElementComponent, lazily resolved from
     * the globally registered Ifc2x3tc1 package (classifier index 56).
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcBuildingElementComponent() {
        // NOTE(review): lazy init is not synchronized -- presumably only accessed
        // after single-threaded registry initialization; confirm if used concurrently.
        if (ifcBuildingElementComponentEClass == null) {
            ifcBuildingElementComponentEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(56);
        }
        return ifcBuildingElementComponentEClass;
    }
}
public class HashObjectStore { /** * / * ( non - Javadoc ) * @ see org . jboss . arquillian . core . impl . ObjectStore # get ( java . lang . Class ) */ @ Override public < T > T get ( Class < T > type ) { } }
Validate . notNull ( type , "Type must be specified" ) ; return type . cast ( store . get ( type ) ) ;
public class MDRRGImpl {
    /**
     * <!-- begin-user-doc -->
     * EMF reflective setter: dispatches on the structural feature ID.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.MDRRG__RG_LENGTH:
                setRGLength((Integer) newValue);
                return;
            case AfplibPackage.MDRRG__TRIPLETS:
                // Replace the whole triplets list with the supplied collection.
                getTriplets().clear();
                getTriplets().addAll((Collection<? extends Triplet>) newValue);
                return;
        }
        // Unknown feature: let the superclass handle (or reject) it.
        super.eSet(featureID, newValue);
    }
}
public class ZipUtil {
    /**
     * Compresses the given directory and all its sub-directories into a ZIP file.
     * The ZIP file must not be a directory and its parent directory must exist.
     * Will not include the root directory name in the archive.
     *
     * @param rootDir root directory.
     * @param zip ZIP file that will be created or overwritten.
     * @param compressionLevel compression level
     */
    public static void pack(File rootDir, File zip, int compressionLevel) {
        // Delegate with the identity name mapper so entry names are kept unchanged.
        pack(rootDir, zip, IdentityNameMapper.INSTANCE, compressionLevel);
    }
}
public class AccountsInner {
    /**
     * Get usages for the requested Cognitive Services account.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription.
     * @param accountName The name of Cognitive Services account.
     * @param filter An OData filter expression that describes a subset of usages to return. The supported parameter is name.value (name of the metric, can have an or of multiple names).
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the UsagesResultInner object if successful.
     */
    public UsagesResultInner getUsages(String resourceGroupName, String accountName, String filter) {
        // Synchronous facade: blocks the calling thread on the async service call.
        return getUsagesWithServiceResponseAsync(resourceGroupName, accountName, filter).toBlocking().single().body();
    }
}
public class Identity { /** * Compare two DSN * @ param dsn1 Distinguished name ( X . 500 DSN ) string * @ param dsn2 Distinguished name ( X . 500 DSN ) string * @ return boolean true if both DSN are equal * @ since 0.1.5 */ private boolean distinguishedNameEquals ( String dsn1 , String dsn2 ) { } }
return new X500Principal ( dsn1 ) . equals ( new X500Principal ( dsn2 ) ) ;
public class DRealHistogram {
    /**
     * Big allocation of arrays: one double slot per histogram bin for the
     * per-bin sums and sums-of-squares.
     */
    @Override
    void init0() {
        _sums = MemoryManager.malloc8d(_nbin);
        _ssqs = MemoryManager.malloc8d(_nbin);
    }
}
public class RunningWorkers {
    /**
     * Cancels a Tasklet: if it is not yet running it is queued for cancellation,
     * otherwise (and if permitted) the running worker is told to cancel it.
     * Concurrency: Called by multiple threads.
     * Parameter: Same taskletId can come in multiple times.
     *
     * @param mayInterruptIfRunning whether an already-running tasklet may be cancelled
     * @param taskletId the id of the tasklet to cancel
     */
    void cancelTasklet(final boolean mayInterruptIfRunning, final int taskletId) {
        lock.lock();
        try {
            // This is not ideal since we are using a linear time search on all the workers.
            final String workerId = getWhereTaskletWasScheduledTo(taskletId);
            if (workerId == null) {
                // launchTasklet called but not yet running.
                taskletsToCancel.add(taskletId);
                return;
            }
            if (mayInterruptIfRunning) {
                LOG.log(Level.FINE, "Cancelling running Tasklet with ID {0}.", taskletId);
                runningWorkers.get(workerId).cancelTasklet(taskletId);
            }
        } finally {
            lock.unlock();
        }
    }
}
public class XmlTransformer {
    /**
     * Transforms the XML node into the string, recursing over attributes and children.
     *
     * NOTE(review): attribute values and text content are appended verbatim with no
     * XML escaping -- presumably inputs are pre-escaped or trusted; confirm, else
     * characters like &lt; and " would produce malformed output.
     *
     * @param node the node to transform
     * @param builder the string builder
     */
    public void transform(XmlNode node, StringBuilder builder) {
        switch (node.getType()) {
            case XmlNode.ELEMENT_NODE: {
                // Open tag with attributes, recurse into children, then close tag.
                builder.append("<");
                builder.append(node.getName());
                for (XmlNode attribute : node.getAttributes().values()) {
                    transform(attribute, builder);
                }
                builder.append(">");
                for (XmlNode child : node.getChildren()) {
                    transform(child, builder);
                }
                builder.append("</");
                builder.append(node.getName());
                builder.append(">");
            }
                break;
            case XmlNode.ATTRIBUTE_NODE: {
                // Rendered as: name="value" (with a leading space).
                builder.append(" ");
                builder.append(node.getName());
                builder.append("=\"");
                builder.append(node.getValue());
                builder.append("\"");
            }
                break;
            case XmlNode.TEXT_NODE: {
                builder.append(node.getValue());
            }
        }
    }
}
public class ClassScanner { /** * Find all classes that match the class name pattern . * @ param classNamePattern the class name pattern * @ return a Map for scanned classes * @ throws IOException if an I / O error has occurred */ public Map < String , Class < ? > > scan ( String classNamePattern ) throws IOException { } }
final Map < String , Class < ? > > scannedClasses = new LinkedHashMap < > ( ) ; scan ( classNamePattern , scannedClasses ) ; return scannedClasses ;
public class JobClient { /** * Wait for the job driver to complete . */ public void waitForCompletion ( final int waitTime ) { } }
LOG . info ( "Waiting for the Job Driver to complete: " + waitTime ) ; if ( waitTime == 0 ) { close ( 0 ) ; return ; } else if ( waitTime < 0 ) { waitTillDone ( ) ; } final long endTime = System . currentTimeMillis ( ) + waitTime * 1000 ; close ( endTime ) ;
public class CallbackWrapper { /** * Warning : this will not be called on error or timeout * @ param resultHandler * @ return */ @ Override public IPromise < T > onResult ( Consumer < T > resultHandler ) { } }
if ( realCallback instanceof IPromise == false ) throw new RuntimeException ( "this is an error." ) ; else return ( ( IPromise ) realCallback ) . onResult ( resultHandler ) ;
public class CommandLine {
    /**
     * Copy ctor: returns a field-for-field copy of this CommandLine.
     * The java property map is deep-copied and the coordinator set is
     * defensively copied; final base-class fields cannot be reassigned
     * and are noted inline.
     *
     * @return a new CommandLine equivalent to this one
     */
    public CommandLine makeCopy() {
        CommandLine cl = new CommandLine(m_startAction);
        // first copy the base class fields
        cl.m_ipcPort = m_ipcPort;
        cl.m_backend = m_backend;
        cl.m_leader = m_leader;
        cl.m_pathToCatalog = m_pathToCatalog;
        cl.m_pathToDeployment = m_pathToDeployment;
        cl.m_pathToLicense = m_pathToLicense;
        cl.m_noLoadLibVOLTDB = m_noLoadLibVOLTDB;
        cl.m_zkInterface = m_zkInterface;
        cl.m_port = m_port;
        cl.m_adminPort = m_adminPort;
        cl.m_internalPort = m_internalPort;
        cl.m_externalInterface = m_externalInterface;
        cl.m_internalInterface = m_internalInterface;
        cl.m_drAgentPortStart = m_drAgentPortStart;
        cl.m_httpPort = m_httpPort;
        cl.m_drPublicHost = m_drPublicHost;
        cl.m_drPublicPort = m_drPublicPort;
        // final in baseclass: cl.m_isEnterprise = m_isEnterprise;
        cl.m_deadHostTimeoutMS = m_deadHostTimeoutMS;
        cl.m_startMode = m_startMode;
        cl.m_selectedRejoinInterface = m_selectedRejoinInterface;
        cl.m_quietAdhoc = m_quietAdhoc;
        // final in baseclass: cl.m_commitLogDir = new File("/tmp");
        cl.m_timestampTestingSalt = m_timestampTestingSalt;
        cl.m_isRejoinTest = m_isRejoinTest;
        cl.m_tag = m_tag;
        cl.m_vemTag = m_vemTag;
        cl.m_versionStringOverrideForTest = m_versionStringOverrideForTest;
        cl.m_versionCompatibilityRegexOverrideForTest = m_versionCompatibilityRegexOverrideForTest;
        cl.m_buildStringOverrideForTest = m_buildStringOverrideForTest;
        cl.m_forceVoltdbCreate = m_forceVoltdbCreate;
        cl.m_userSchemas = m_userSchemas;
        cl.m_stagedClassesPaths = m_stagedClassesPaths;
        // second, copy the derived class fields
        cl.includeTestOpts = includeTestOpts;
        cl.debugPort = debugPort;
        cl.zkport = zkport;
        cl.buildDir = buildDir;
        cl.volt_root = volt_root;
        cl.java_library_path = java_library_path;
        cl.rmi_host_name = rmi_host_name;
        cl.log4j = log4j;
        cl.gcRollover = gcRollover;
        cl.voltFilePrefix = voltFilePrefix;
        cl.initialHeap = initialHeap;
        cl.maxHeap = maxHeap;
        cl.classPath = classPath;
        cl.javaExecutable = javaExecutable;
        cl.jmxPort = jmxPort;
        cl.jmxHost = jmxHost;
        cl.customCmdLn = customCmdLn;
        cl.m_isPaused = m_isPaused;
        cl.m_meshBrokers = m_meshBrokers;
        // defensive copy so the clone cannot see later mutations of the coordinator set
        cl.m_coordinators = ImmutableSortedSet.copyOf(m_coordinators);
        cl.m_hostCount = m_hostCount;
        cl.m_enableAdd = m_enableAdd;
        cl.m_voltdbRoot = m_voltdbRoot;
        cl.m_newCli = m_newCli;
        cl.m_sslEnable = m_sslEnable;
        cl.m_sslExternal = m_sslExternal;
        cl.m_sslInternal = m_sslInternal;
        cl.m_placementGroup = m_placementGroup;
        // deep copy the property map if it exists
        if (javaProperties != null) {
            cl.javaProperties = new TreeMap<>();
            for (Entry<String, String> e : javaProperties.entrySet()) {
                cl.javaProperties.put(e.getKey(), e.getValue());
            }
        }
        cl.m_missingHostCount = m_missingHostCount;
        return cl;
    }
}
public class Conversion { /** * Converts binary ( represented as boolean array ) into a short using the default ( little * endian , Lsb0 ) byte and bit ordering . * @ param src the binary to convert * @ param srcPos the position in { @ code src } , in boolean unit , from where to start the * conversion * @ param dstInit initial value of the destination short * @ param dstPos the position of the lsb , in bits , in the result short * @ param nBools the number of booleans to convert * @ return a short containing the selected bits * @ throws NullPointerException if { @ code src } is { @ code null } * @ throws IllegalArgumentException if { @ code nBools - 1 + dstPos > = 16} * @ throws ArrayIndexOutOfBoundsException if { @ code srcPos + nBools > src . length } */ public static short binaryToShort ( final boolean [ ] src , final int srcPos , final short dstInit , final int dstPos , final int nBools ) { } }
if ( src . length == 0 && srcPos == 0 || 0 == nBools ) { return dstInit ; } if ( nBools - 1 + dstPos >= 16 ) { throw new IllegalArgumentException ( "nBools-1+dstPos is greater or equal to than 16" ) ; } short out = dstInit ; for ( int i = 0 ; i < nBools ; i ++ ) { final int shift = i + dstPos ; final int bits = ( src [ i + srcPos ] ? 1 : 0 ) << shift ; final int mask = 0x1 << shift ; out = ( short ) ( ( out & ~ mask ) | bits ) ; } return out ;
public class TangramEngine { /** * { @ inheritDoc } */ @ Override public void destroy ( ) { } }
RecyclerView contentView = getContentView ( ) ; if ( contentView != null ) { contentView . removeCallbacks ( updateRunnable ) ; } super . destroy ( ) ;
public class CsvBeanWriter {
    /**
     * {@inheritDoc}
     * Writes a bean as a CSV row: extracts the mapped properties, runs each
     * column through its cell processor, then writes the processed row.
     */
    public void write(final Object source, final String[] nameMapping, final CellProcessor[] processors) throws IOException {
        // update the current row/line numbers
        super.incrementRowAndLineNo();
        // extract the bean values
        extractBeanValues(source, nameMapping);
        // execute the processors for each column
        Util.executeCellProcessors(processedColumns, beanValues, processors, getLineNumber(), getRowNumber());
        // write the list
        super.writeRow(processedColumns);
    }
}
public class AttributePayload {
    /**
     * A JSON string containing up to three key-value pair in JSON format. For example:
     * <code>{\"attributes\":{\"string1\":\"string2\"}}</code>
     *
     * @param attributes A JSON string containing up to three key-value pair in JSON format. For example:
     *        <code>{\"attributes\":{\"string1\":\"string2\"}}</code>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AttributePayload withAttributes(java.util.Map<String, String> attributes) {
        // Fluent-builder variant of setAttributes: set and return this for chaining.
        setAttributes(attributes);
        return this;
    }
}
public class SuspendedAccessContext { /** * Set prepared access - context on thread . * @ param accessContext The context of DB access . ( NotNull ) */ public static void setAccessContextOnThread ( AccessContext accessContext ) { } }
if ( accessContext == null ) { String msg = "The argument[accessContext] must not be null." ; throw new IllegalArgumentException ( msg ) ; } Stack < AccessContext > stack = threadLocal . get ( ) ; if ( stack == null ) { stack = new Stack < AccessContext > ( ) ; threadLocal . set ( stack ) ; } stack . add ( accessContext ) ;
public class Project { /** * Get stories in this Project filtered as specified in the passed in * filter . * @ param filter Criteria to filter on . Project will be set automatically . * If null , all stories in the project are returned . * @ param includeSubprojects Specifies whether to include items from sub * project or not . This only adds open subprojects . * @ return An Collection of Story . */ public Collection < Story > getStories ( StoryFilter filter , boolean includeSubprojects ) { } }
filter = ( filter != null ) ? filter : new StoryFilter ( ) ; return getInstance ( ) . get ( ) . story ( getFilter ( filter , includeSubprojects ) ) ;
public class ExcelUtils { /** * 无模板 、 基于注解的数据导出 * @ param data 待导出数据 * @ param clazz { @ link com . github . crab2died . annotation . ExcelField } 映射对象Class * @ param isWriteHeader 是否写入表头 * @ param os 生成的Excel待输出数据流 * @ throws Excel4JException 异常 * @ throws IOException 异常 * @ author Crab2Died */ public void exportObjects2Excel ( List < ? > data , Class clazz , boolean isWriteHeader , OutputStream os ) throws Excel4JException , IOException { } }
try ( Workbook workbook = exportExcelNoTemplateHandler ( data , clazz , isWriteHeader , null , true ) ) { workbook . write ( os ) ; }
public class QueryCacheEventPublisher {
    /**
     * Publishes a single per-partition event to every query-cache accumulator
     * registered for the given map.
     * TODO known issue: Locked keys will also be cleared from the query-cache
     * after calling a map-wide event like clear/evictAll.
     * NOTE(review): the caller and numberOfEntriesAffected parameters are not
     * used by this implementation.
     */
    public void hintMapEvent(Address caller, String mapName, EntryEventType eventType, int numberOfEntriesAffected, int partitionId) {
        // Every query-cache defined on this map gets the same single-partition hint.
        for (PartitionAccumulatorRegistry registry : getPartitionAccumulatorRegistries(mapName)) {
            Accumulator accumulator = registry.getOrCreate(partitionId);
            QueryCacheEventData eventData = newQueryCacheEventDataBuilder(false)
                    .withPartitionId(partitionId)
                    .withEventType(eventType.getType())
                    .build();
            accumulator.accumulate(eventData);
        }
    }
}
public class ShuttleList { /** * Move the selected items in the source list to the chosen list . I . e . , add * the items to our selection model . */ protected void moveLeftToRight ( ) { } }
// Loop over the selected items and locate them in the data model , Add // these to the selection . Object [ ] sourceSelected = sourceList . getSelectedValues ( ) ; int nSourceSelected = sourceSelected . length ; int [ ] currentSelection = helperList . getSelectedIndices ( ) ; int [ ] newSelection = new int [ currentSelection . length + nSourceSelected ] ; System . arraycopy ( currentSelection , 0 , newSelection , 0 , currentSelection . length ) ; int destPos = currentSelection . length ; for ( int i = 0 ; i < sourceSelected . length ; i ++ ) { newSelection [ destPos ++ ] = indexOf ( sourceSelected [ i ] ) ; } helperList . setSelectedIndices ( newSelection ) ; update ( ) ;
public class Converters { /** * Creates an entity and populates its state based on the dbObject given . This method is primarily an internal method . Reliance on * this method may break your application in future releases . * @ param dbObj the object state to use * @ param mf the MappedField containing the metadata to use when decoding in to a field * @ param targetEntity then entity to hold the state from the database */ public void fromDBObject ( final DBObject dbObj , final MappedField mf , final Object targetEntity ) { } }
final Object object = mf . getDbObjectValue ( dbObj ) ; if ( object != null ) { final TypeConverter enc = getEncoder ( mf ) ; final Object decodedValue = enc . decode ( mf . getType ( ) , object , mf ) ; try { mf . setFieldValue ( targetEntity , decodedValue ) ; } catch ( IllegalArgumentException e ) { throw new MappingException ( format ( "Error setting value from converter (%s) for %s to %s" , enc . getClass ( ) . getSimpleName ( ) , mf . getFullName ( ) , decodedValue ) , e ) ; } }
public class BundleEntryScanner { /** * Returns the recurse by first looking for an entry in the manifest of the bundle specified by recurse manifest * header . It will return the default recurse if : * - file pattern manifest header is null * - header is not set * - header is not a string * - header is not true or false ( case insensitive ) * - header is empty * @ param bundle bundle containing the manifest * @ return found file pattern */ protected boolean getRecurse ( final Bundle bundle ) { } }
if ( m_recurseManifestHeader != null ) { final Object value = bundle . getHeaders ( ) . get ( m_recurseManifestHeader ) ; if ( value instanceof String && ( ( String ) value ) . trim ( ) . length ( ) > 0 && ( ( ( ( String ) value ) . trim ( ) . equalsIgnoreCase ( "true" ) ) || ( ( String ) value ) . trim ( ) . equalsIgnoreCase ( "false" ) ) ) { return Boolean . valueOf ( ( String ) value ) ; } } return m_recurse ;
public class BeansDescriptorImpl { /** * If not already created , a new < code > decorators < / code > element will be created and returned . Otherwise , the first * existing < code > decorators < / code > element will be returned . * @ return the instance defined for the element < code > decorators < / code > */ public Decorators < BeansDescriptor > getOrCreateDecorators ( ) { } }
List < Node > nodeList = model . get ( "decorators" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new DecoratorsImpl < BeansDescriptor > ( this , "decorators" , model , nodeList . get ( 0 ) ) ; } return createDecorators ( ) ;
public class OtpInputStream { /** * Read a two byte little endian integer from the stream . * @ return the bytes read , converted from little endian to an integer . * @ exception OtpErlangDecodeException * if the next byte cannot be read . */ public int read2LE ( ) throws OtpErlangDecodeException { } }
final byte [ ] b = new byte [ 2 ] ; try { super . read ( b ) ; } catch ( final IOException e ) { throw new OtpErlangDecodeException ( "Cannot read from input stream" ) ; } return ( b [ 1 ] << 8 & 0xff00 ) + ( b [ 0 ] & 0xff ) ;
public class ErrorLogger { /** * Write a debug message to the logs * @ param message The debug message */ public void debug ( final String message , final int level ) { } }
messages . add ( new LogMessage ( message , LogMessage . Type . DEBUG , level ) ) ; log . debug ( message ) ;
public class CollectionLiteralsTypeComputer {
    /**
     * Creates a collection type reference that comes as close as possible/necessary
     * to its expected type.
     */
    protected LightweightTypeReference createCollectionTypeReference(JvmGenericType collectionType, LightweightTypeReference elementType, LightweightTypeReference expectedType, ITypeReferenceOwner owner) {
        // Start from the literal's own collection type, parameterized with the element type.
        ParameterizedTypeReference result = new ParameterizedTypeReference(owner, collectionType);
        result.addTypeArgument(elementType);
        // Only consider adopting the expectation when it is an iterable and the
        // computed literal type is not already assignable to it.
        if (isIterableExpectation(expectedType) && !expectedType.isAssignableFrom(result)) {
            // avoid to assign a set literal to a list and viceversa:
            // at least the raw types must be assignable
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=498779
            if (expectedType.getRawTypeReference().isAssignableFrom(result.getRawTypeReference())) {
                LightweightTypeReference expectedElementType = getElementOrComponentType(expectedType, owner);
                // Adopt the expected type wholesale only when the element types line up too.
                if (matchesExpectation(elementType, expectedElementType)) {
                    return expectedType;
                }
            }
        }
        return result;
    }
}
public class DecompilerUtil { /** * Returns an appropriate output directory for the decompiled data based upon the provided { @ link JavaClassFileModel } . * This should be the top - level directory for the package ( eg , / tmp / project / foo for the file / tmp / project / foo / com / example / Foo . class ) . * This could be the same directory as the file itself , if the file is already in the output directory . If the . class file is referencing a file * in the input directory , then this will be a classes folder underneath the output directory . */ static File getOutputDirectoryForClass ( GraphContext context , JavaClassFileModel fileModel ) { } }
final File result ; WindupConfigurationModel configuration = WindupConfigurationService . getConfigurationModel ( context ) ; File inputPath = fileModel . getProjectModel ( ) . getRootProjectModel ( ) . getRootFileModel ( ) . asFile ( ) ; if ( PathUtil . isInSubDirectory ( inputPath , fileModel . asFile ( ) ) ) { String outputPath = configuration . getOutputPath ( ) . getFilePath ( ) ; result = Paths . get ( outputPath ) . resolve ( "classes" ) . toFile ( ) ; } else { String packageName = fileModel . getPackageName ( ) ; if ( StringUtils . isBlank ( packageName ) ) return fileModel . asFile ( ) . getParentFile ( ) ; String [ ] packageComponents = packageName . split ( "\\." ) ; File rootFile = fileModel . asFile ( ) . getParentFile ( ) ; for ( int i = 0 ; i < packageComponents . length ; i ++ ) { rootFile = rootFile . getParentFile ( ) ; } result = rootFile ; } return result ;
public class Ftp { /** * Opens a FTP Connection * @ return FTPCLinet * @ throws IOException * @ throws PageException */ private AFTPClient actionOpen ( ) throws IOException , PageException { } }
required ( "server" , server ) ; required ( "username" , username ) ; // required ( " password " , password ) ; AFTPClient client = getClient ( ) ; writeCfftp ( client ) ; return client ;
public class NTLMResponses { /** * Creates the NTLMv2 blob from the given target information block and * client nonce . * @ param targetInformation The target information block from the Type 2 * message . * @ param clientNonce The random 8 - byte client nonce . * @ param time the time stamp . * @ return The blob , used in the calculation of the NTLMv2 Response . */ private static byte [ ] createBlob ( byte [ ] targetInformation , byte [ ] clientNonce , long time ) { } }
byte [ ] blobSignature = new byte [ ] { ( byte ) 0x01 , ( byte ) 0x01 , ( byte ) 0x00 , ( byte ) 0x00 } ; byte [ ] reserved = new byte [ ] { ( byte ) 0x00 , ( byte ) 0x00 , ( byte ) 0x00 , ( byte ) 0x00 } ; byte [ ] unknown1 = new byte [ ] { ( byte ) 0x00 , ( byte ) 0x00 , ( byte ) 0x00 , ( byte ) 0x00 } ; byte [ ] unknown2 = new byte [ ] { ( byte ) 0x00 , ( byte ) 0x00 , ( byte ) 0x00 , ( byte ) 0x00 } ; time += 11644473600000l ; // milliseconds from January 1 , 1601 - > epoch . time *= 10000 ; // tenths of a microsecond . // convert to little - endian byte array . byte [ ] timestamp = new byte [ 8 ] ; for ( int i = 0 ; i < 8 ; i ++ ) { timestamp [ i ] = ( byte ) time ; time >>>= 8 ; } byte [ ] blob = new byte [ blobSignature . length + reserved . length + timestamp . length + clientNonce . length + unknown1 . length + targetInformation . length + unknown2 . length ] ; int offset = 0 ; System . arraycopy ( blobSignature , 0 , blob , offset , blobSignature . length ) ; offset += blobSignature . length ; System . arraycopy ( reserved , 0 , blob , offset , reserved . length ) ; offset += reserved . length ; System . arraycopy ( timestamp , 0 , blob , offset , timestamp . length ) ; offset += timestamp . length ; System . arraycopy ( clientNonce , 0 , blob , offset , clientNonce . length ) ; offset += clientNonce . length ; System . arraycopy ( unknown1 , 0 , blob , offset , unknown1 . length ) ; offset += unknown1 . length ; System . arraycopy ( targetInformation , 0 , blob , offset , targetInformation . length ) ; offset += targetInformation . length ; System . arraycopy ( unknown2 , 0 , blob , offset , unknown2 . length ) ; return blob ;
public class GenericUrl { /** * Appends the given raw encoded path to the current { @ link # pathParts } , setting field only if it * is { @ code null } or empty . * < p > The last part of the { @ link # pathParts } is merged with the first part of the path parts * computed from the given encoded path . Thus , if the current raw encoded path is { @ code " a " } , and * the given encoded path is { @ code " b " } , then the resulting raw encoded path is { @ code " ab " } . * @ param encodedPath raw encoded path or { @ code null } to ignore */ public void appendRawPath ( String encodedPath ) { } }
if ( encodedPath != null && encodedPath . length ( ) != 0 ) { List < String > appendedPathParts = toPathParts ( encodedPath ) ; if ( pathParts == null || pathParts . isEmpty ( ) ) { this . pathParts = appendedPathParts ; } else { int size = pathParts . size ( ) ; pathParts . set ( size - 1 , pathParts . get ( size - 1 ) + appendedPathParts . get ( 0 ) ) ; pathParts . addAll ( appendedPathParts . subList ( 1 , appendedPathParts . size ( ) ) ) ; } }
public class DescribeVirtualInterfacesRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param describeVirtualInterfacesRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol-level marshaller that receives the bound values
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeVirtualInterfacesRequest describeVirtualInterfacesRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeVirtualInterfacesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its marshalling location.
            protocolMarshaller.marshall(describeVirtualInterfacesRequest.getConnectionId(), CONNECTIONID_BINDING);
            protocolMarshaller.marshall(describeVirtualInterfacesRequest.getVirtualInterfaceId(), VIRTUALINTERFACEID_BINDING);
        } catch (Exception e) {
            // NOTE(review): broad catch mirrors the AWS SDK generated-marshaller
            // convention — every failure is rethrown as a client error with the
            // original cause preserved.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class NodeIndexer { /** * Adds the reference value to the document as the named field . The value ' s * string representation is added as the reference data . Additionally the * reference data is stored in the index . * @ param doc The document to which to add the field * @ param fieldName The name of the field to add * @ param internalValue The value for the field to add to the document . */ protected void addReferenceValue ( Document doc , String fieldName , Object internalValue ) { } }
String uuid = internalValue . toString ( ) ; doc . add ( createFieldWithoutNorms ( fieldName , uuid , PropertyType . REFERENCE ) ) ; doc . add ( new Field ( FieldNames . PROPERTIES , FieldNames . createNamedValue ( fieldName , uuid ) , Field . Store . YES , Field . Index . NO , Field . TermVector . NO ) ) ;
public class IOUtils { /** * < p > readInputStreamBufferedAsString . < / p > * @ param in a { @ link java . io . InputStream } object . * @ param charset a { @ link java . lang . String } object . * @ return a { @ link java . lang . String } object . * @ throws java . io . IOException if any . */ public static String readInputStreamBufferedAsString ( final InputStream in , final String charset ) throws IOException { } }
BufferedReader reader = null ; try { reader = new BufferedReader ( new UnicodeReader ( in , charset ) ) ; StringBuilder result = new StringBuilder ( ) ; char [ ] cbuf = new char [ 2048 ] ; int read ; while ( ( read = reader . read ( cbuf ) ) > 0 ) result . append ( cbuf , 0 , read ) ; return result . toString ( ) ; } finally { closeIgnoringException ( reader ) ; }
public class SSLReadServiceContext {
    /**
     * Get the buffers that will be used for output from the SSL engine. If read buffers were
     * supplied by the calling application channel, they will be used. Note: if a buffer array
     * was supplied, the first buffer of the array will be used (since the SSL engine only takes
     * a single output buffer). If not supplied, one will be allocated here. The size of the
     * buffer will be either the JITAllocationSize set by the user, or the default size from the
     * SSL engine if the caller didn't provide anything.
     * Note: it is the responsibility of the application channel to release this buffer if it
     * gets allocated.
     */
    private void getDecryptedNetworkBuffers() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "getDecryptedNetworkBuffers");
        }
        // Check if we already have a known decNetworkBuffer array.
        if (decryptedNetBuffers == null) {
            // Check if the buffer was set by the calling app channel.
            decryptedNetBuffers = getBuffers();
            if (decryptedNetBuffers == null) {
                // Not set by calling app channel. Allocate it here.
                // Flag records that we (not the caller) allocated, so the app
                // channel knows it must release the buffer.
                callerRequiredAllocation = true;
                int allocationSize = getJITAllocateSize();
                int minSize = getConnLink().getAppBufferSize();
                // Ensure the value is positive; fall back to the SSL engine's
                // application buffer size when no JIT size was configured.
                if (allocationSize <= 0) {
                    allocationSize = minSize;
                }
                // Allocate the buffer. Note, app channel has the responsibility to release this.
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                    Tr.event(tc, "allocating JIT buffer; size=" + allocationSize);
                }
                decryptedNetBuffers = SSLUtils.allocateByteBuffers(allocationSize, bytesRequested, getConfig().getDecryptBuffersDirect(), false);
            } else {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Using buffers from getBuffers()");
                }
            }
        } else {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Using buffers previously set");
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "getDecryptedNetworkBuffers");
        }
    }
}
public class ThreadServiceWorker {
    /**
     * Gets the next worker available to process a task.
     *
     * @return The next available worker.
     * @throws InterruptedException If the thread is interrupted while waiting
     *         for an available worker.
     */
    private Worker getWorker() throws InterruptedException {
        // Block until the courtesy monitor permits tasks to run.
        while (!courtesyMonitor.allowTasksToRun()) {
            courtesyMonitor.waitFor();
        }
        // Shrink the pool while it exceeds the configured maximum.
        // NOTE(review): workers taken here are simply discarded — presumably their
        // threads terminate elsewhere once dequeued; confirm against Worker's lifecycle.
        while (numWorkers > maxWorkers) {
            workerQueue.take();
            numWorkers--;
        }
        // Blocks until a worker becomes available on the queue.
        return workerQueue.take();
    }
}
public class CapabilitiesRegistrarImpl {
    /**
     * Registers a provider for the given domain with the discovery service.
     *
     * @see io.joynr.capabilities.CapabilitiesRegistrar#registerProvider(java.lang.String,
     *      io.joynr.provider.JoynrProvider, java.lang.Class, boolean)
     */
    @Override
    public Future<Void> registerProvider(final String domain, Object provider, ProviderQos providerQos, boolean awaitGlobalRegistration) {
        if (providerQos == null) {
            throw new JoynrRuntimeException("providerQos == null. It must not be null");
        }
        ProviderContainer providerContainer = providerContainerFactory.create(provider);
        // Participant id is derived from domain + interface + major version.
        String participantId = participantIdStorage.getProviderParticipantId(domain, providerContainer.getInterfaceName(), providerContainer.getMajorVersion());
        // No public key configured for the provider; an empty id is used.
        String defaultPublicKeyId = "";
        // Entry is valid from now until now + defaultExpiryTimeMs.
        DiscoveryEntry discoveryEntry = new DiscoveryEntry(getVersionFromAnnotation(provider.getClass()), domain, providerContainer.getInterfaceName(), participantId, providerQos, System.currentTimeMillis(), System.currentTimeMillis() + defaultExpiryTimeMs, defaultPublicKeyId);
        final boolean isGloballyVisible = (discoveryEntry.getQos().getScope() == ProviderScope.GLOBAL);
        // Route messages for this participant locally before exposing the provider.
        messageRouter.addNextHop(participantId, libjoynrMessagingAddress, isGloballyVisible);
        providerDirectory.add(participantId, providerContainer);
        // Registration completion is reported asynchronously; success needs no action,
        // failures are only logged here (the returned Future carries the outcome).
        Callback<Void> callback = new Callback<Void>() {
            @Override
            public void onSuccess(@CheckForNull Void result) {
            }

            @Override
            public void onFailure(JoynrRuntimeException runtimeException) {
                logger.error("Unexpected Error while registering Provider:", runtimeException);
            }
        };
        return localDiscoveryAggregator.add(callback, discoveryEntry, awaitGlobalRegistration);
    }
}
public class SqlConnRunner { /** * 分页全字段查询 < br > * 此方法不会关闭Connection * @ param conn 数据库连接对象 * @ param where 条件实体类 ( 包含表名 ) * @ param page 分页对象 * @ return 结果对象 * @ throws SQLException SQL执行异常 */ public PageResult < Entity > page ( Connection conn , Entity where , Page page ) throws SQLException { } }
return this . page ( conn , null , where , page ) ;
public class ClassHelper { /** * Determines the field via reflection look - up . * @ param clazz The java class to search in * @ param fieldName The field ' s name * @ return The field object or < code > null < / code > if no matching field was found */ public static Field getField ( Class clazz , String fieldName ) { } }
try { return clazz . getField ( fieldName ) ; } catch ( Exception ignored ) { } return null ;