signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Contents { /** * Save session ' s changes . */ public void save ( ) { } }
SC . ask ( "Do you want to save changes" , new BooleanCallback ( ) { @ Override public void execute ( Boolean yesSelected ) { if ( yesSelected ) { jcrService ( ) . save ( repository ( ) , workspace ( ) , new BaseCallback < Object > ( ) { @ Override public void onSuccess ( Object result ) { session ( ) . setHasChanges ( false ) ; updateControls ( ) ; } } ) ; } } } ) ;
public class WorkflowTriggersInner { /** * Resets a workflow trigger . * @ param resourceGroupName The resource group name . * @ param workflowName The workflow name . * @ param triggerName The workflow trigger name . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponse } object if successful . */ public Observable < Void > resetAsync ( String resourceGroupName , String workflowName , String triggerName ) { } }
return resetWithServiceResponseAsync ( resourceGroupName , workflowName , triggerName ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ;
public class JvmExecutableImpl {

    /**
     * <!-- begin-user-doc -->
     * Returns the containment list of formal parameters, creating it lazily
     * on first access.
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<JvmFormalParameter> getParameters() {
        // Lazily create the EMF containment list the first time it is requested.
        if (parameters == null) {
            parameters = new EObjectContainmentEList<JvmFormalParameter>(JvmFormalParameter.class, this, TypesPackage.JVM_EXECUTABLE__PARAMETERS);
        }
        return parameters;
    }
}
public class JsDocTokenStream {

    /**
     * Gets the remaining JSDoc line without the {@link JsDocToken#EOL},
     * {@link JsDocToken#EOF} or {@link JsDocToken#EOC}.
     */
    @SuppressWarnings("fallthrough")
    String getRemainingJSDocLine() {
        int c;
        for (;;) {
            c = getChar();
            switch (c) {
                case '*':
                    // A '*' is ordinary content unless followed by '/', which
                    // ends the comment; in that case fall through to the
                    // line-terminating cases below.
                    if (peekChar() != '/') {
                        addToString(c);
                        break;
                    }
                    // fall through
                case EOF_CHAR:
                case '\n':
                    // Push the terminator back so the normal tokenizer sees it,
                    // then hand out everything buffered so far.
                    ungetChar(c);
                    this.string = getStringFromBuffer();
                    stringBufferTop = 0;
                    return this.string;
                default:
                    addToString(c);
                    break;
            }
        }
    }
}
public class StructurizrDocumentationTemplate { /** * Adds a " Data " section relating to a { @ link SoftwareSystem } . * @ param softwareSystem the { @ link SoftwareSystem } the documentation content relates to * @ param format the { @ link Format } of the documentation content * @ param content a String containing the documentation content * @ return a documentation { @ link Section } */ @ Nonnull public Section addDataSection ( @ Nullable SoftwareSystem softwareSystem , @ Nonnull Format format , @ Nonnull String content ) { } }
return addSection ( softwareSystem , "Data" , format , content ) ;
public class ESRIFileUtil { /** * Translate a floating point value into ESRI standard . * < p > This function translate the Java NaN and infinites values * into the ESRI equivalent value . * @ param value the value . * @ return the ESRI value */ @ Pure public static double toESRI ( float value ) { } }
return ( Float . isInfinite ( value ) || Float . isNaN ( value ) ) ? ESRI_NAN : value ;
public class Utils {

    /**
     * Check that the value we have discovered is of the right type. It may not be if the field has changed type during
     * a reload. When this happens we will default the value for the new field and forget the one we were holding onto.
     * note: array forms are not compatible (e.g. int[] and Integer[])
     *
     * @param registry the type registry that can be quizzed for type information
     * @param result the result we have discovered and are about to return - this is never null
     * @param expectedTypeDescriptor the type we are looking for (will be primitive or Ljava/lang/String style)
     * @return the result we can return, or null if it is not compatible
     */
    public static Object checkCompatibility(TypeRegistry registry, Object result, String expectedTypeDescriptor) {
        if (GlobalConfiguration.assertsMode) {
            Utils.assertTrue(result != null, "result should never be null");
        }
        String actualType = result.getClass().getName();
        // A one-character descriptor denotes a primitive; a boxed value of the
        // matching wrapper type is compatible.
        if (expectedTypeDescriptor.length() == 1 && Utils.isObjectIsUnboxableTo(result.getClass(), expectedTypeDescriptor.charAt(0))) {
            // boxing is ok
        } else {
            // Strip the 'L...;' wrapping and convert to a dotted class name.
            if (expectedTypeDescriptor.charAt(0) == 'L') {
                expectedTypeDescriptor = expectedTypeDescriptor.substring(1, expectedTypeDescriptor.length() - 1).replace('/', '.');
            }
            if (!expectedTypeDescriptor.equals(actualType)) {
                // assignability test
                // Array forms are never considered compatible (see note above).
                if (actualType.charAt(0) == '[' || expectedTypeDescriptor.charAt(0) == '[') {
                    return null;
                }
                // In some situations we can't easily see the descriptor for the actualType (e.g. it is loaded by a different, perhaps child, loader)
                // Let's do something a bit more sophisticated here, we have the type information after all, we don't need to hunt for descriptors:
                Class<?> actualClazz = result.getClass();
                if (isAssignableFrom(registry, actualClazz, expectedTypeDescriptor.replace('/', '.'))) {
                    return result;
                }
                return null;
            }
        }
        return result;
    }
}
public class SvnWorkspaceProviderImpl { /** * Returns a File object whose path is the expected user directory . * Does not create or check for existence . * @ param prefix * @ param suffix * @ param parent * @ return */ private static File getUserDirectory ( final String prefix , final String suffix , final File parent ) { } }
final String dirname = formatDirName ( prefix , suffix ) ; return new File ( parent , dirname ) ;
public class AbstractApplication { /** * Print the description for the given parameter */ private static void printDescription ( Class < ? > descriptionClass ) { } }
if ( descriptionClass == null ) { return ; } try { LoggingConfiguration . setVerbose ( Level . VERBOSE ) ; LOG . verbose ( OptionUtil . describeParameterizable ( new StringBuilder ( ) , descriptionClass , FormatUtil . getConsoleWidth ( ) , "" ) . toString ( ) ) ; } catch ( Exception e ) { LOG . exception ( "Error instantiating class to describe." , e . getCause ( ) ) ; }
public class TimephasedUtility { /** * This is the main entry point used to convert the internal representation * of timephased baseline work into an external form which can * be displayed to the user . * @ param file parent project file * @ param work timephased resource assignment data * @ param rangeUnits timescale units * @ param dateList timescale date ranges * @ return list of durations , one per timescale date range */ public ArrayList < Duration > segmentBaselineWork ( ProjectFile file , List < TimephasedWork > work , TimescaleUnits rangeUnits , ArrayList < DateRange > dateList ) { } }
return segmentWork ( file . getBaselineCalendar ( ) , work , rangeUnits , dateList ) ;
public class Util {

    /**
     * Normalizes the given probabilities in place so that they sum to 1, and
     * reports the largest normalized probability (usable as a convergence
     * check by the caller).
     *
     * @param prob the probabilities; overwritten with their normalized values
     * @return the maximum of the normalized probabilities
     */
    public static double normalizeProb(double[] prob) {
        double total = 0;
        for (double p : prob) {
            total += p;
        }
        double max = 0;
        for (int i = 0; i < prob.length; i++) {
            prob[i] /= total;
            if (prob[i] > max) {
                max = prob[i];
            }
        }
        return max;
    }
}
public class HttpInputStream { /** * Resets the input stream for a new connection . */ public void resets ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { // 306998.15 logger . logp ( Level . FINE , CLASS_NAME , "resets" , "resets" ) ; } this . in = null ;
public class DirectQuickSelectSketch { /** * UpdateSketch */ @ Override public UpdateSketch rebuild ( ) { } }
final int lgNomLongs = getLgNomLongs ( ) ; final int preambleLongs = mem_ . getByte ( PREAMBLE_LONGS_BYTE ) & 0X3F ; if ( getRetainedEntries ( true ) > ( 1 << lgNomLongs ) ) { quickSelectAndRebuild ( mem_ , preambleLongs , lgNomLongs ) ; } return this ;
public class CmsMessageBundleEditorModel { /** * Rename a key for all languages . * @ param oldKey the key to rename * @ param newKey the new key name * @ return < code > true < / code > if renaming was successful , < code > false < / code > otherwise . */ private boolean renameKeyForAllLanguages ( String oldKey , String newKey ) { } }
try { loadAllRemainingLocalizations ( ) ; lockAllLocalizations ( oldKey ) ; if ( hasDescriptor ( ) ) { lockDescriptor ( ) ; } } catch ( CmsException | IOException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; return false ; } for ( Entry < Locale , SortedProperties > entry : m_localizations . entrySet ( ) ) { SortedProperties localization = entry . getValue ( ) ; if ( localization . containsKey ( oldKey ) ) { String value = localization . getProperty ( oldKey ) ; localization . remove ( oldKey ) ; localization . put ( newKey , value ) ; m_changedTranslations . add ( entry . getKey ( ) ) ; } } if ( hasDescriptor ( ) ) { CmsXmlContentValueSequence messages = m_descContent . getValueSequence ( Descriptor . N_MESSAGE , Descriptor . LOCALE ) ; for ( int i = 0 ; i < messages . getElementCount ( ) ; i ++ ) { String prefix = messages . getValue ( i ) . getPath ( ) + "/" ; String key = m_descContent . getValue ( prefix + Descriptor . N_KEY , Descriptor . LOCALE ) . getStringValue ( m_cms ) ; if ( key == oldKey ) { m_descContent . getValue ( prefix + Descriptor . N_KEY , Descriptor . LOCALE ) . setStringValue ( m_cms , newKey ) ; break ; } } m_descriptorHasChanges = true ; } m_keyset . renameKey ( oldKey , newKey ) ; return true ;
public class FilteringDependencyTransitiveNodeVisitor { /** * { @ inheritDoc } */ @ Override public boolean visit ( DependencyNode node ) { } }
final boolean visit ; if ( filter . accept ( node ) ) { visit = visitor . visit ( node ) ; } else { visit = false ; } return visit ;
public class ZooKeeperClient {

    /**
     * Removes an existing node.
     *
     * @param path the node path to remove
     * @param removeChildren
     *            {@code true} to indicate that child nodes should be removed
     *            too
     * @return {@code true} if node has been removed successfully, {@code false}
     *         otherwise (maybe node is not empty)
     * @since 0.4.1
     * @throws ZooKeeperException if the deletion fails for any reason other
     *         than the node being absent or non-empty
     */
    public boolean removeNode(String path, boolean removeChildren) throws ZooKeeperException {
        try {
            if (removeChildren) {
                curatorFramework.delete().deletingChildrenIfNeeded().forPath(path);
            } else {
                curatorFramework.delete().forPath(path);
            }
        } catch (KeeperException.NotEmptyException e) {
            // Node still has children and removeChildren was false.
            return false;
        } catch (KeeperException.NoNodeException e) {
            // Node is already gone — treat as success.
            // NOTE(review): the cache is NOT invalidated on this path,
            // presumably because it never held the node — TODO confirm.
            return true;
        } catch (Exception e) {
            // Preserve ZooKeeperExceptions as-is; wrap anything else.
            if (e instanceof ZooKeeperException) {
                throw (ZooKeeperException) e;
            } else {
                throw new ZooKeeperException(e);
            }
        }
        _invalidateCache(path);
        return true;
    }
}
public class InstancesController { /** * Get a single instance . * @ param id The application identifier . * @ return The registered application . */ @ GetMapping ( path = "/instances/{id}" , produces = MediaType . APPLICATION_JSON_VALUE ) public Mono < ResponseEntity < Instance > > instance ( @ PathVariable String id ) { } }
LOGGER . debug ( "Deliver registered instance with ID '{}'" , id ) ; return registry . getInstance ( InstanceId . of ( id ) ) . filter ( Instance :: isRegistered ) . map ( ResponseEntity :: ok ) . defaultIfEmpty ( ResponseEntity . notFound ( ) . build ( ) ) ;
public class HibernateDocumentDao {

    /**
     * {@inheritDoc}
     *
     * <p>Looks up the single persisted {@code Reference} matching the given
     * reference's sections, requirement (name + repository uid), specification
     * (name + repository uid) and system under test (name + project name).</p>
     */
    public Reference get(Reference reference) {
        final Criteria crit = sessionService.getSession().createCriteria(Reference.class);
        // An empty sections value is stored as NULL, so match accordingly.
        if (StringUtil.isEmpty(reference.getSections())) {
            crit.add(Restrictions.isNull("sections"));
        } else {
            crit.add(Restrictions.eq("sections", reference.getSections()));
        }
        // Match the requirement by name and by owning repository uid.
        crit.createAlias("requirement", "req");
        crit.add(Restrictions.eq("req.name", reference.getRequirement().getName()));
        crit.createAlias("req.repository", "reqRepo");
        crit.add(Restrictions.eq("reqRepo.uid", reference.getRequirement().getRepository().getUid()));
        // Match the specification by name and by owning repository uid.
        crit.createAlias("specification", "spec");
        crit.add(Restrictions.eq("spec.name", reference.getSpecification().getName()));
        crit.createAlias("spec.repository", "specRepo");
        crit.add(Restrictions.eq("specRepo.uid", reference.getSpecification().getRepository().getUid()));
        // Match the system under test by name and by project name.
        crit.createAlias("systemUnderTest", "sut");
        crit.add(Restrictions.eq("sut.name", reference.getSystemUnderTest().getName()));
        crit.createAlias("sut.project", "sp");
        crit.add(Restrictions.eq("sp.name", reference.getSystemUnderTest().getProject().getName()));
        Reference result = (Reference) crit.uniqueResult();
        // Force initialization of lazy associations before the session closes.
        HibernateLazyInitializer.init(result);
        return result;
    }
}
public class XImportDeclarationImpl {

    /**
     * <!-- begin-user-doc -->
     * Resets the given feature to its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case XtypePackage.XIMPORT_DECLARATION__WILDCARD:
                setWildcard(WILDCARD_EDEFAULT);
                return;
            case XtypePackage.XIMPORT_DECLARATION__EXTENSION:
                setExtension(EXTENSION_EDEFAULT);
                return;
            case XtypePackage.XIMPORT_DECLARATION__STATIC:
                setStatic(STATIC_EDEFAULT);
                return;
            case XtypePackage.XIMPORT_DECLARATION__IMPORTED_TYPE:
                setImportedType((JvmDeclaredType) null);
                return;
            case XtypePackage.XIMPORT_DECLARATION__MEMBER_NAME:
                setMemberName(MEMBER_NAME_EDEFAULT);
                return;
            case XtypePackage.XIMPORT_DECLARATION__IMPORTED_NAMESPACE:
                setImportedNamespace(IMPORTED_NAMESPACE_EDEFAULT);
                return;
        }
        // Unknown feature — delegate to the superclass.
        super.eUnset(featureID);
    }
}
public class AnnotationManager { /** * Create an annotation on the map * @ param options the annotation options defining the annotation to build * @ return the build annotation */ @ UiThread public T create ( S options ) { } }
T t = options . build ( currentId , this ) ; annotations . put ( t . getId ( ) , t ) ; currentId ++ ; updateSource ( ) ; return t ;
public class RoadPath { /** * Replies if the road segment of < var > path < / var > ( the first or the last in this order ) * that could be connected to the last point of the current path . * @ param path is the path from which a road segment should be read . * @ return the connectable segment from the < var > path < / var > ; or < code > null < / code > * if no connection is possible . * @ since 4.0 */ @ Pure public RoadSegment getConnectableSegmentToLastPoint ( RoadPath path ) { } }
assert path != null ; if ( path . isEmpty ( ) ) { return null ; } RoadConnection last1 = getLastPoint ( ) ; RoadConnection first2 = path . getFirstPoint ( ) ; RoadConnection last2 = path . getLastPoint ( ) ; last1 = last1 . getWrappedRoadConnection ( ) ; first2 = first2 . getWrappedRoadConnection ( ) ; last2 = last2 . getWrappedRoadConnection ( ) ; if ( last1 . equals ( first2 ) ) { return path . getFirstSegment ( ) ; } if ( last1 . equals ( last2 ) ) { return path . getLastSegment ( ) ; } return null ;
public class ForeignSegmentDocId { /** * { @ inheritDoc } */ int [ ] getDocumentNumbers ( MultiIndexReader reader , int [ ] docNumbers ) throws IOException { } }
int doc = reader . getDocumentNumber ( this ) ; if ( doc == - 1 ) { return EMPTY ; } else { if ( docNumbers . length == 1 ) { docNumbers [ 0 ] = doc ; return docNumbers ; } else { return new int [ ] { doc } ; } }
public class ClassificationService { /** * Return all { @ link ClassificationModel } instances that are attached to the given { @ link FileModel } instance . */ public Iterable < ClassificationModel > getClassifications ( FileModel model ) { } }
GraphTraversal < Vertex , Vertex > pipeline = new GraphTraversalSource ( getGraphContext ( ) . getGraph ( ) ) . V ( model . getElement ( ) ) ; pipeline . in ( ClassificationModel . FILE_MODEL ) ; pipeline . has ( WindupVertexFrame . TYPE_PROP , Text . textContains ( ClassificationModel . TYPE ) ) ; return new FramedVertexIterable < > ( getGraphContext ( ) . getFramed ( ) , pipeline . toList ( ) , ClassificationModel . class ) ;
public class JVnSenSegmenter { /** * Sen segment . * @ param text the text * @ return the string */ public String senSegment ( String text ) { } }
// text normalization text = text . replaceAll ( "([\t \n])+" , "$1" ) ; // System . out . println ( text ) ; // generate context predicates List markList = new ArrayList ( ) ; List data = FeatureGenerator . doFeatureGen ( new HashMap ( ) , text , markList , false ) ; if ( markList . isEmpty ( ) ) return text + "\n" ; // classify List labels = classifier . classify ( data ) ; String result = text . substring ( 0 , ( ( Integer ) markList . get ( 0 ) ) . intValue ( ) ) ; for ( int i = 0 ; i < markList . size ( ) ; ++ i ) { int curPos = ( ( Integer ) markList . get ( i ) ) . intValue ( ) ; if ( ( ( String ) labels . get ( i ) ) . equals ( positiveLabel ) ) { result += " " + text . charAt ( curPos ) + "\n" ; } else result += text . charAt ( curPos ) ; if ( i < markList . size ( ) - 1 ) { int nexPos = ( ( Integer ) markList . get ( i + 1 ) ) . intValue ( ) ; result += text . substring ( curPos + 1 , nexPos ) ; } } int finalMarkPos = ( ( Integer ) markList . get ( markList . size ( ) - 1 ) ) . intValue ( ) ; result += text . substring ( finalMarkPos + 1 , text . length ( ) ) ; // System . out . println ( result ) ; result = result . replaceAll ( "\n " , "\n" ) ; result = result . replaceAll ( "\n\n" , "\n" ) ; result = result . replaceAll ( "\\.\\. \\." , "..." ) ; return result ;
public class ScanAPI { /** * 检查wxticket参数 * @ param accessToken accessToken * @ param ticket ticket * @ return TicketCheckResult */ public static TicketCheckResult ticketCheck ( String accessToken , String ticket ) { } }
HttpUriRequest httpUriRequest = RequestBuilder . post ( ) . setHeader ( jsonHeader ) . setUri ( BASE_URI + "/scan/scanticket/check" ) . addParameter ( PARAM_ACCESS_TOKEN , API . accessToken ( accessToken ) ) . addParameter ( "ticket" , ticket ) . build ( ) ; return LocalHttpClient . executeJsonResult ( httpUriRequest , TicketCheckResult . class ) ;
public class LazyLoader { /** * Gets a new instance of the class corresponding to the key . */ public V get ( K key ) { } }
Entry < V > entry = classMap . get ( key ) ; return entry == null ? null : entry . get ( ) ;
public class MultiTaskProgress {

    /**
     * Automatically call the done or error method of this WorkProgress once
     * all current sub-tasks are done.
     */
    public void doneOnSubTasksDone() {
        // Already armed — do not create a second join point.
        // NOTE(review): this check happens outside the synchronized block, so
        // two concurrent callers could both pass it — TODO confirm callers
        // serialize invocations.
        if (jp != null) return;
        synchronized (tasks) {
            // Join on every current sub-task without cancelling any of them.
            jp = new JoinPoint<>();
            for (SubTask task : tasks)
                jp.addToJoinDoNotCancel(task.getProgress().getSynch());
        }
        // Once every joined sub-task completes, propagate either the first
        // recorded error or a normal completion.
        jp.listenInline(new Runnable() {
            @Override
            public void run() {
                if (jp.hasError()) error(jp.getError());
                else done();
            }
        });
        jp.start();
    }
}
public class TupleCombinerBuilder { /** * Adds the given once - only tuples to this combiner . */ public TupleCombinerBuilder once ( Stream < TupleRef > tupleRefs ) { } }
tupleRefs . forEach ( tupleRef -> tupleCombiner_ . addOnceTuple ( tupleRef ) ) ; return this ;
public class FeatureInfoBuilder {

    /**
     * Build a feature results information message.
     *
     * @param results feature index results
     * @param tolerance distance tolerance
     * @param clickLocation map click location
     * @param projection desired geometry projection
     * @return results message or null if no results
     */
    public String buildResultsInfoMessage(FeatureIndexResults results, double tolerance, LatLng clickLocation, Projection projection) {
        String message = null;
        // Fine filter results so that the click location is within the tolerance of each feature row result
        FeatureIndexResults filteredResults = fineFilterResults(results, tolerance, clickLocation);
        long featureCount = filteredResults.count();
        if (featureCount > 0) {
            // Points allow a different detailed-info cap than other geometries.
            int maxFeatureInfo = 0;
            if (geometryType == GeometryType.POINT) {
                maxFeatureInfo = maxPointDetailedInfo;
            } else {
                maxFeatureInfo = maxFeatureDetailedInfo;
            }
            if (featureCount <= maxFeatureInfo) {
                // Few enough results: print full per-feature details.
                StringBuilder messageBuilder = new StringBuilder();
                messageBuilder.append(name).append("\n");
                int featureNumber = 0;
                DataColumnsDao dataColumnsDao = getDataColumnsDao();
                for (FeatureRow featureRow : filteredResults) {
                    featureNumber++;
                    if (featureNumber > maxFeatureInfo) {
                        break;
                    }
                    // With multiple features, print a count header once and a
                    // per-feature header for each row.
                    if (featureCount > 1) {
                        if (featureNumber > 1) {
                            messageBuilder.append("\n");
                        } else {
                            messageBuilder.append("\n").append(featureCount).append(" Features").append("\n");
                        }
                        messageBuilder.append("\n").append("Feature ").append(featureNumber).append(":").append("\n");
                    }
                    // Print every non-null, non-geometry column as "name: value".
                    int geometryColumn = featureRow.getGeometryColumnIndex();
                    for (int i = 0; i < featureRow.columnCount(); i++) {
                        if (i != geometryColumn) {
                            Object value = featureRow.getValue(i);
                            if (value != null) {
                                String columnName = featureRow.getColumnName(i);
                                columnName = getColumnName(dataColumnsDao, featureRow, columnName);
                                messageBuilder.append("\n").append(columnName).append(": ").append(value);
                            }
                        }
                    }
                    // Optionally print the geometry itself, controlled by
                    // per-geometry-type flags.
                    GeoPackageGeometryData geomData = featureRow.getGeometry();
                    if (geomData != null && geomData.getGeometry() != null) {
                        boolean printFeatures = false;
                        if (geomData.getGeometry().getGeometryType() == GeometryType.POINT) {
                            printFeatures = detailedInfoPrintPoints;
                        } else {
                            printFeatures = detailedInfoPrintFeatures;
                        }
                        if (printFeatures) {
                            if (projection != null) {
                                projectGeometry(geomData, projection);
                            }
                            messageBuilder.append("\n\n");
                            messageBuilder.append(GeometryPrinter.getGeometryString(geomData.getGeometry()));
                        }
                    }
                }
                message = messageBuilder.toString();
            } else {
                // Too many results: print only a summary near the click point.
                StringBuilder messageBuilder = new StringBuilder();
                messageBuilder.append(name).append("\n\t").append(featureCount).append(" features");
                if (clickLocation != null) {
                    messageBuilder.append(" near location:\n");
                    Point point = new Point(clickLocation.longitude, clickLocation.latitude);
                    messageBuilder.append(GeometryPrinter.getGeometryString(point));
                }
                message = messageBuilder.toString();
            }
        }
        return message;
    }
}
public class ServletSupport { /** * Redirects http - request to url * @ param url */ public static void redirect ( String url , boolean copyParameters , ServletRequest req , ServletResponse res ) { } }
if ( url == null ) { throw new IllegalArgumentException ( "URL cannot be null" ) ; } String redirectUrl = url ; char separator = '?' ; if ( redirectUrl . indexOf ( '?' ) != - 1 ) { separator = '&' ; } if ( copyParameters ) { Enumeration e = req . getParameterNames ( ) ; while ( e . hasMoreElements ( ) ) { String name = ( String ) e . nextElement ( ) ; String value = req . getParameter ( name ) ; redirectUrl += separator + name + '=' + value ; separator = '&' ; } e = req . getAttributeNames ( ) ; /* while ( e . hasMoreElements ( ) ) String name = ( String ) e . nextElement ( ) ; String value = req . getParameter ( name ) ; redirectUrl + = separator + name + ' = ' + value ; separator = ' & ' ; */ } System . out . println ( new LogEntry ( "about to redirect to " + redirectUrl ) ) ; try { ( ( HttpServletResponse ) res ) . sendRedirect ( redirectUrl ) ; } catch ( IOException ioe ) { throw new ResourceException ( "redirect to '" + redirectUrl + "' failed with message: " + ioe . getMessage ( ) , ioe ) ; }
public class Futures { /** * Returns a { @ code Future } whose result is taken from the given primary * { @ code input } or , if the primary input fails , from the { @ code Future } * provided by the { @ code fallback } . { @ link FutureFallback # create } is not * invoked until the primary input has failed , so if the primary input * succeeds , it is never invoked . If , during the invocation of { @ code * fallback } , an exception is thrown , this exception is used as the result of * the output { @ code Future } . * < p > Below is an example of a fallback that returns a default value if an * exception occurs : * < pre > { @ code * ListenableFuture < Integer > fetchCounterFuture = . . . ; * / / Falling back to a zero counter in case an exception happens when * / / processing the RPC to fetch counters . * ListenableFuture < Integer > faultTolerantFuture = Futures . withFallback ( * fetchCounterFuture , new FutureFallback < Integer > ( ) { * public ListenableFuture < Integer > create ( Throwable t ) { * / / Returning " 0 " as the default for the counter when the * / / exception happens . * return immediateFuture ( 0 ) ; * } ) ; } < / pre > * < p > The fallback can also choose to propagate the original exception when * desired : * < pre > { @ code * ListenableFuture < Integer > fetchCounterFuture = . . . ; * / / Falling back to a zero counter only in case the exception was a * / / TimeoutException . * ListenableFuture < Integer > faultTolerantFuture = Futures . 
withFallback ( * fetchCounterFuture , new FutureFallback < Integer > ( ) { * public ListenableFuture < Integer > create ( Throwable t ) { * if ( t instanceof TimeoutException ) { * return immediateFuture ( 0 ) ; * return immediateFailedFuture ( t ) ; * } ) ; } < / pre > * < p > Note : If the derived { @ code Future } is slow or heavyweight to create * ( whether the { @ code Future } itself is slow or heavyweight to complete is * irrelevant ) , consider { @ linkplain # withFallback ( ListenableFuture , * FutureFallback , Executor ) supplying an executor } . If you do not supply an * executor , { @ code withFallback } will use a * { @ linkplain MoreExecutors # directExecutor direct executor } , which carries * some caveats for heavier operations . For example , the call to { @ code * fallback . create } may run on an unpredictable or undesirable thread : * < ul > * < li > If the input { @ code Future } is done at the time { @ code withFallback } * is called , { @ code withFallback } will call { @ code fallback . create } inline . * < li > If the input { @ code Future } is not yet done , { @ code withFallback } will * schedule { @ code fallback . create } to be run by the thread that completes * the input { @ code Future } , which may be an internal system thread such as * an RPC network thread . * < / ul > * < p > Also note that , regardless of which thread executes the { @ code * fallback . create } , all other registered but unexecuted listeners are * prevented from running during its execution , even if those listeners are * to run in other executors . * @ param input the primary input { @ code Future } * @ param fallback the { @ link FutureFallback } implementation to be called if * { @ code input } fails * @ since 14.0 */ public static < V > ListenableFuture < V > withFallback ( ListenableFuture < ? extends V > input , FutureFallback < ? extends V > fallback ) { } }
return withFallback ( input , fallback , directExecutor ( ) ) ;
public class ConfigHelper { /** * Resolve image with an external image resolver * @ param images the original image config list ( can be null ) * @ param imageResolver the resolver used to extend on an image configuration * @ param imageNameFilter filter to select only certain image configurations with the given name * @ param imageCustomizer final customization hook for mangling the configuration * @ return a list of resolved and customized image configuration . */ public static List < ImageConfiguration > resolveImages ( Logger logger , List < ImageConfiguration > images , Resolver imageResolver , String imageNameFilter , Customizer imageCustomizer ) { } }
List < ImageConfiguration > ret = resolveConfiguration ( imageResolver , images ) ; ret = imageCustomizer . customizeConfig ( ret ) ; List < ImageConfiguration > filtered = filterImages ( imageNameFilter , ret ) ; if ( ret . size ( ) > 0 && filtered . size ( ) == 0 && imageNameFilter != null ) { List < String > imageNames = new ArrayList < > ( ) ; for ( ImageConfiguration image : ret ) { imageNames . add ( image . getName ( ) ) ; } logger . warn ( "None of the resolved images [%s] match the configured filter '%s'" , StringUtils . join ( imageNames . iterator ( ) , "," ) , imageNameFilter ) ; } return filtered ;
public class FileUtil { /** * 计算目录或文件的总大小 < br > * 当给定对象为文件时 , 直接调用 { @ link File # length ( ) } < br > * 当给定对象为目录时 , 遍历目录下的所有文件和目录 , 递归计算其大小 , 求和返回 * @ param file 目录或文件 * @ return 总大小 , bytes长度 */ public static long size ( File file ) { } }
Assert . notNull ( file , "file argument is null !" ) ; if ( false == file . exists ( ) ) { throw new IllegalArgumentException ( StrUtil . format ( "File [{}] not exist !" , file . getAbsolutePath ( ) ) ) ; } if ( file . isDirectory ( ) ) { long size = 0L ; File [ ] subFiles = file . listFiles ( ) ; if ( ArrayUtil . isEmpty ( subFiles ) ) { return 0L ; // empty directory } for ( int i = 0 ; i < subFiles . length ; i ++ ) { size += size ( subFiles [ i ] ) ; } return size ; } else { return file . length ( ) ; }
public class BlobOutputStream { /** * Writes a buffer . */ @ Override public void write ( byte [ ] buffer , int offset , int length ) throws IOException { } }
while ( length > 0 ) { if ( _bufferEnd <= _offset ) { flushBlock ( false ) ; } int sublen = Math . min ( _bufferEnd - _offset , length ) ; System . arraycopy ( buffer , offset , _buffer , _offset , sublen ) ; offset += sublen ; _offset += sublen ; length -= sublen ; }
public class ServletCallback {

    /**
     * Called on method entry for HTTP/JSP requests.
     * True method signature: void before(long requestTime, HttpServletRequest
     * request, HttpServletResponse response)
     *
     * @param request HttpServletRequest
     * @param response HttpServletResponse
     */
    public static void before(Object request, Object response) {
        /*
         * Use reflection to access the HttpServletRequest/Response as using the
         * true method signature caused ClassLoader issues.
         */
        Class<?> reqClass = request.getClass();
        try {
            /*
             * Retrieve the tracker from the request and increment nesting level
             */
            HttpRequestTracker tracker;
            Method getAttribute = reqClass.getMethod(GET_ATTRIBUTE, String.class);
            tracker = (HttpRequestTracker) (getAttribute.invoke(request, TRACKER_ATTRIBUTE));
            if (tracker == null) {
                // First entry for this request — start a fresh tracker.
                tracker = new HttpRequestTracker();
            }
            tracker.increment();
            // Store the (possibly new) tracker back on the request.
            Method setAttribute = reqClass.getMethod(SET_ATTRIBUTE, String.class, Object.class);
            setAttribute.invoke(request, TRACKER_ATTRIBUTE, tracker);
        } catch (Exception e) {
            // Log any exception caused by our injected code
            System.err.println("Javametrics: Servlet callback exception: " + e.toString());
            e.printStackTrace();
        }
    }
}
public class HistoryStore {

    /**
     * Updates the history for a read request, distinguishing pattern reads,
     * multi-attribute reads and single-attribute reads (each has a different
     * value structure in the JSON answer).
     *
     * @param pJmxReq the read request
     * @param pJson the JSON answer whose value is recorded and which receives
     *        the history entry
     * @param pTimestamp timestamp to record the value(s) under
     */
    private void updateReadHistory(JmxReadRequest pJmxReq, JSONObject pJson, long pTimestamp) {
        ObjectName name = pJmxReq.getObjectName();
        if (name.isPattern()) {
            // We have a pattern and hence a value structure
            // of bean -> attribute_key -> attribute_value
            Map<String, Object> values = (Map<String, Object>) pJson.get(KEY_VALUE);
            // Can be null if used with path and no single match occurred
            if (values != null) {
                JSONObject history = updateHistoryForPatternRead(pJmxReq, pTimestamp, values);
                if (history.size() > 0) {
                    pJson.put(KEY_HISTORY, history);
                }
            }
        } else if (pJmxReq.isMultiAttributeMode() || !pJmxReq.hasAttribute()) {
            // Multiple attributes, but a single bean.
            // Value has the following structure:
            // attribute_key -> attribute_value
            JSONObject history = addMultipleAttributeValues(pJmxReq, ((Map<String, Object>) pJson.get(KEY_VALUE)), pJmxReq.getObjectNameAsString(), pTimestamp);
            if (history.size() > 0) {
                pJson.put(KEY_HISTORY, history);
            }
        } else {
            // Single attribute, single bean. Value is the attribute_value
            // itself.
            addAttributeFromSingleValue(pJson, new HistoryKey(pJmxReq), KEY_HISTORY, pJson.get(KEY_VALUE), pTimestamp);
        }
    }
}
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the {@code IfcSIPrefix} enum, lazily resolving it from the global
     * EMF package registry on first access and caching it thereafter.
     *
     * NOTE: EMF-generated accessor ({@code @generated}); do not hand-edit the
     * logic, it will be overwritten on the next model regeneration. The magic
     * index 890 is the classifier's fixed position in the generated package.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcSIPrefix() {
        if (ifcSIPrefixEEnum == null) {
            ifcSIPrefixEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(890);
        }
        return ifcSIPrefixEEnum;
    }
}
public class JavascriptReorderingFilter {

    /**
     * Move all included JavaScript files to the end of <code>BODY</code> tag in the same order
     * that they appeared in the original HTML response.
     *
     * @param servletRequest the incoming {@link ServletRequest} instance
     * @param servletResponse the outgoing {@link ServletResponse} instance
     * @param filterChain the {@link FilterChain} being executed
     * @throws IOException if something fails
     * @throws ServletException if something fails
     * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest,
     *      javax.servlet.ServletResponse, javax.servlet.FilterChain)
     */
    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
        // Try and see if this request is for an HTML page by its URI.
        HttpServletRequest request = (HttpServletRequest) servletRequest;
        String uri = request.getRequestURI();
        final boolean htmlPage = isHtmlPage(uri);
        if (!htmlPage) {
            LOGGER.debug("Not an HTML page for javascript reordering: {}", uri);
            filterChain.doFilter(servletRequest, servletResponse);
            return;
        }
        // Run the rest of the chain against a buffering wrapper so the body can
        // be inspected and rewritten before it reaches the client.
        HttpServletResponseWrapperImpl wrapper =
            new HttpServletResponseWrapperImpl((HttpServletResponse) servletResponse);
        filterChain.doFilter(servletRequest, wrapper);
        // Check whether the downstream response is actually HTML.
        // BUG FIX: content types routinely carry a charset suffix, e.g.
        // "text/html; charset=UTF-8", so the previous exact equals() comparison
        // skipped reordering for most real responses. Match on the media-type
        // prefix instead, and guard against a null content type.
        final String contentType = wrapper.getContentType();
        if (contentType == null || !contentType.startsWith("text/html")) {
            LOGGER.debug("Response content not HTML for javascript reordering: {}", uri);
            // Not HTML: pass the buffered response through unchanged.
            wrapper.copyToResponse(servletResponse);
            return;
        }
        final long startTime = System.currentTimeMillis();
        LOGGER.debug("Javascript reordering candidate found: {}", uri);
        writeReorderedHtml(wrapper, servletResponse);
        final long endTime = System.currentTimeMillis();
        LOGGER.debug("Reordering javascript for url: {} took: {}ms", uri, (endTime - startTime));
    }
}
public class DbDatum { public int [ ] extractLongArray ( ) { } }
int [ ] argout ; argout = new int [ values . length ] ; for ( int i = 0 ; i < values . length ; i ++ ) { argout [ i ] = Integer . parseInt ( values [ i ] ) ; } return argout ;
public class FairSchedulerServlet { /** * Print a view of pools to the given output writer . * @ param out All html output goes here . * @ param advancedView Show advanced view if true * @ param String poolFilterSet If not null , only show this set ' s info */ private void showPools ( PrintWriter out , boolean advancedView , Set < String > poolFilterSet ) { } }
ResourceReporter reporter = jobTracker . getResourceReporter ( ) ; synchronized ( jobTracker ) { synchronized ( scheduler ) { PoolManager poolManager = scheduler . getPoolManager ( ) ; out . print ( "<h2>Active Pools</h2>\n" ) ; out . print ( "<table border=\"2\" cellpadding=\"5\" cellspacing=\"2\" " + "class=\"tablesorter\">\n" ) ; out . print ( "<thead><tr><th>Pool</th><th>Running Jobs</th>" + "<th>Preparing Jobs</th>" + "<th>Min Maps</th><th>Min Reduces</th>" + "<th>Max Maps</th><th>Max Reduces</th>" + "<th>Initialized Tasks</th>" + "<th>Max Initialized Tasks</th>" + "<th>Running/Waiting Maps</th><th>Running/Waiting Reduces</th>" + ( reporter != null ? "<th>CPU</th><th>Memory</th>" : "" ) + "<th>Map Avg Wait Seccond</th>" + "<th>Reduce Avg Wait Second</th>" + "</tr></thead><tbody>\n" ) ; List < Pool > pools = new ArrayList < Pool > ( poolManager . getPools ( ) ) ; Collections . sort ( pools , new Comparator < Pool > ( ) { public int compare ( Pool p1 , Pool p2 ) { if ( p1 . isDefaultPool ( ) ) return 1 ; else if ( p2 . isDefaultPool ( ) ) return - 1 ; else return p1 . getName ( ) . compareTo ( p2 . getName ( ) ) ; } } ) ; int numActivePools = 0 ; int totalInitedTasks = 0 ; int totalMaxInitedTasks = 0 ; int totalRunningMaps = 0 ; int totalWaitingMaps = 0 ; int totalRunningReduces = 0 ; int totalWaitingReduces = 0 ; int totalMinReduces = 0 ; int totalMaxReduces = 0 ; int totalMinMaps = 0 ; int totalMaxMaps = 0 ; int totalRunningJobs = 0 ; long totalMapWaitTime = 0 ; long totalReduceWaitTime = 0 ; long totalNonConfiguredFirstMapWaitTime = 0 ; long totalNonConfiguredFirstReduceWaitTime = 0 ; long totalJobsInNonConfiguredPools = 0 ; int totalReduceTasks = 0 ; int totalMapTasks = 0 ; int totalPrepareJobs = 0 ; double totalCpu = 0 ; double totalMemory = 0 ; for ( Pool pool : pools ) { String poolName = pool . getName ( ) ; if ( ( poolFilterSet != null ) && ! poolFilterSet . 
contains ( poolName ) ) { continue ; } int initedTasks = 0 ; int runningMaps = 0 ; int waitingMaps = 0 ; int runningReduces = 0 ; int waitingReduces = 0 ; long poolMapWaitTime = 0 ; long poolReduceWaitTime = 0 ; int poolMapTasks = 0 ; int poolReduceTasks = 0 ; int poolPrepareJobs = 0 ; long poolFirstMapWaitTime = 0 ; long poolFirstReduceWaitTime = 0 ; boolean isConfiguredPool = pool . isConfiguredPool ( ) ; for ( JobInProgress job : pool . getJobs ( ) ) { if ( job . getStatus ( ) . getRunState ( ) == JobStatus . PREP ) { poolPrepareJobs += 1 ; } if ( isConfiguredPool ) { totalJobsInNonConfiguredPools ++ ; totalNonConfiguredFirstMapWaitTime += job . getFirstMapWaitTime ( ) ; totalNonConfiguredFirstReduceWaitTime += job . getFirstReduceWaitTime ( ) ; } JobInfo info = scheduler . infos . get ( job ) ; if ( info != null ) { initedTasks += info . totalInitedTasks ; runningMaps += info . runningMaps ; runningReduces += info . runningReduces ; waitingMaps += info . neededMaps ; waitingReduces += info . neededReduces ; poolMapWaitTime += job . getTotalMapWaitTime ( ) ; poolReduceWaitTime += job . getTotalReduceWaitTime ( ) ; poolMapTasks += job . desiredMaps ( ) ; poolReduceTasks += job . desiredReduces ( ) ; } } double poolMapAverageWaitTime = 0 ; double poolReduceAverageWaitTime = 0 ; if ( poolMapTasks != 0 ) { poolMapAverageWaitTime = ( double ) poolMapWaitTime / poolMapTasks ; totalMapWaitTime += poolMapWaitTime ; totalMapTasks += poolMapTasks ; } if ( poolReduceTasks != 0 ) { poolReduceAverageWaitTime = ( double ) poolReduceWaitTime / poolReduceTasks ; totalReduceWaitTime += poolReduceWaitTime ; totalReduceTasks += poolReduceTasks ; } int runningJobs = pool . getJobs ( ) . size ( ) ; int minMaps = poolManager . getMinSlots ( poolName , TaskType . MAP ) ; int minReduces = poolManager . getMinSlots ( poolName , TaskType . REDUCE ) ; int maxMaps = poolManager . getMaxSlots ( poolName , TaskType . MAP ) ; int maxReduces = poolManager . getMaxSlots ( poolName , TaskType . 
REDUCE ) ; int maxInitedTasks = poolManager . getPoolMaxInitedTasks ( poolName ) ; totalRunningJobs += runningJobs ; totalInitedTasks += initedTasks ; totalRunningMaps += runningMaps ; totalWaitingMaps += waitingMaps ; totalRunningReduces += runningReduces ; totalWaitingReduces += waitingReduces ; totalMinMaps += minMaps ; totalMinReduces += minReduces ; if ( runningJobs == 0 && minMaps == 0 && minReduces == 0 && maxMaps == Integer . MAX_VALUE && maxReduces == Integer . MAX_VALUE && initedTasks == 0 && runningMaps == 0 && runningReduces == 0 ) { continue ; } numActivePools ++ ; out . print ( "<tr>\n" ) ; out . printf ( "<td>%s</td>\n" , poolName ) ; out . printf ( "<td>%s</td>\n" , runningJobs ) ; out . printf ( "<td>%s</td>\n" , poolPrepareJobs ) ; out . printf ( "<td>%s</td>\n" , minMaps ) ; out . printf ( "<td>%s</td>\n" , minReduces ) ; if ( maxMaps == Integer . MAX_VALUE ) { out . printf ( "<td>-</td>\n" ) ; } else { out . printf ( "<td>%s</td>\n" , maxMaps ) ; totalMaxMaps += maxMaps ; } if ( maxReduces == Integer . MAX_VALUE ) { out . printf ( "<td>-</td>\n" ) ; } else { out . printf ( "<td>%s</td>\n" , maxReduces ) ; totalMaxReduces += maxReduces ; } out . printf ( "<td>%s</td>\n" , initedTasks ) ; if ( maxInitedTasks == Integer . MAX_VALUE ) { out . printf ( "<td>-</td>\n" ) ; } else { out . printf ( "<td>%s</td>\n" , maxInitedTasks ) ; totalMaxInitedTasks += maxInitedTasks ; } out . printf ( "<td>%s/%s</td>\n" , runningMaps , waitingMaps ) ; out . printf ( "<td>%s/%s</td>\n" , runningReduces , waitingReduces ) ; // Compute the CPU and memory usage double cpuUsage = 0 ; // in percentage double memoryUsage = 0 ; // in percentage if ( reporter != null ) { for ( JobInProgress job : pool . getJobs ( ) ) { double cpu = reporter . getJobCpuPercentageOnCluster ( job . getJobID ( ) ) ; double memory = reporter . getJobMemPercentageOnCluster ( job . getJobID ( ) ) ; cpuUsage += cpu != ResourceReporter . UNAVAILABLE ? 
cpu : 0 ; memoryUsage += memory != ResourceReporter . UNAVAILABLE ? memory : 0 ; } out . printf ( "<td>%.1f%%</td>\n" , cpuUsage ) ; out . printf ( "<td>%.1f%%</td>\n" , memoryUsage ) ; } totalCpu += cpuUsage ; totalMemory += memoryUsage ; totalPrepareJobs += poolPrepareJobs ; out . printf ( "<td>%.1f</td>\n" , poolMapAverageWaitTime / 1000D ) ; out . printf ( "<td>%.1f</td>\n" , poolReduceAverageWaitTime / 1000D ) ; out . print ( "</tr>\n" ) ; } out . print ( "<tr>\n" ) ; out . printf ( "<td>Total</td>\n" ) ; out . printf ( "<td>%s</td>\n" , totalRunningJobs ) ; out . printf ( "<td>%s</td>\n" , totalPrepareJobs ) ; out . printf ( "<td>%s</td>\n" , totalMinMaps ) ; out . printf ( "<td>%s</td>\n" , totalMinReduces ) ; if ( totalMaxMaps == 0 ) { out . printf ( "<td>-</td>\n" ) ; } else { out . printf ( "<td>%s</td>\n" , totalMaxMaps ) ; } if ( totalMaxReduces == 0 ) { out . printf ( "<td>-</td>\n" ) ; } else { out . printf ( "<td>%s</td>\n" , totalMaxReduces ) ; } out . printf ( "<td>%s</td>\n" , totalInitedTasks ) ; out . printf ( "<td>%s</td>\n" , totalMaxInitedTasks ) ; out . printf ( "<td>%s/%s</td>\n" , totalRunningMaps , totalWaitingMaps ) ; out . printf ( "<td>%s/%s</td>\n" , totalRunningReduces , totalWaitingReduces ) ; if ( reporter != null ) { out . printf ( "<td>%.1f%%</td>\n" , totalCpu ) ; out . printf ( "<td>%.1f%%</td>\n" , totalMemory ) ; } double mapAverageWaitTime = totalMapTasks == 0 ? 0 : ( double ) totalMapWaitTime / totalMapTasks ; double reduceAverageWaitTime = totalReduceTasks == 0 ? 0 : ( double ) totalReduceWaitTime / totalReduceTasks ; out . printf ( "<td>%.1f</td>\n" , mapAverageWaitTime ) ; out . printf ( "<td>%.1f</td>\n" , reduceAverageWaitTime ) ; out . print ( "</tr>\n" ) ; out . print ( "</tbody></table>\n" ) ; out . printf ( "<p>Number of active/total pools : %d/%d</p>" , numActivePools , pools . size ( ) ) ; double nonConfiguredAverageFirstMapWaitTime = totalJobsInNonConfiguredPools == 0 ? 
0 : ( double ) totalNonConfiguredFirstMapWaitTime / totalJobsInNonConfiguredPools ; double nonConfiguredAverageFirstReduceWaitTime = totalJobsInNonConfiguredPools == 0 ? 0 : ( double ) totalNonConfiguredFirstReduceWaitTime / totalJobsInNonConfiguredPools ; // Non - configured = = ad - hoc . out . printf ( "<p>Average first map wait time in ad-hoc pools: %f</p>" , nonConfiguredAverageFirstMapWaitTime ) ; out . printf ( "<p>Average first reduce wait time in ad-hoc pools: %f</p>" , nonConfiguredAverageFirstReduceWaitTime ) ; } }
public class CmsGalleryController {

    /**
     * Loads the root VFS entry bean for a given site selector option.<p>
     *
     * Fires an asynchronous RPC; the wait indicator is shown after 200ms and the
     * supplied callback receives the loaded bean on success.
     *
     * @param siteRoot the site root for which the VFS entry should be loaded
     * @param filter the search filter
     * @param asyncCallback the callback to call with the result
     */
    public void loadVfsEntryBean(
        final String siteRoot,
        final String filter,
        final AsyncCallback<CmsVfsEntryBean> asyncCallback) {

        CmsRpcAction<CmsVfsEntryBean> action = new CmsRpcAction<CmsVfsEntryBean>() {

            @Override
            public void execute() {
                // 200ms delay before the loading indicator appears; not cancellable.
                start(200, false);
                // The action itself is the RPC callback.
                getGalleryService().loadVfsEntryBean(siteRoot, filter, this);
            }

            @Override
            public void onResponse(CmsVfsEntryBean result) {
                stop(false);
                asyncCallback.onSuccess(result);
            }
        };
        action.execute();
    }
}
public class DestinationManager { /** * Remove a link for a pseudo desintation ID . * @ param destinationUuid The ID of the pseudo destination to remove . */ public final void removePseudoDestination ( SIBUuid12 destinationUuid ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "removePseudoDestination" , destinationUuid ) ; destinationIndex . removePseudoUuid ( destinationUuid ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "removePseudoDestination" ) ;
public class CPDefinitionSpecificationOptionValuePersistenceImpl {

    /**
     * Clears the cache for all cp definition specification option values.
     *
     * The {@link EntityCache} and {@link FinderCache} are both cleared by this method:
     * first the per-entity cache for this model class, then every finder-cache
     * region (entity finders, paginated list finders, unpaginated list finders).
     */
    @Override
    public void clearCache() {
        entityCache.clearCache(CPDefinitionSpecificationOptionValueImpl.class);

        finderCache.clearCache(FINDER_CLASS_NAME_ENTITY);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
    }
}
public class HelloDory { /** * Delete data by ID */ private void deleteData ( DoradusClient client ) { } }
DBObject dbObject = DBObject . builder ( ) . withValue ( "_ID" , "TMaguire" ) . build ( ) ; DBObjectBatch dbObjectBatch = DBObjectBatch . builder ( ) . withObject ( dbObject ) . build ( ) ; Command command = Command . builder ( ) . withName ( "Delete" ) . withParam ( "application" , "HelloSpider" ) . withParam ( "table" , "Actors" ) . withParam ( "batch" , dbObjectBatch ) . build ( ) ; RESTResponse response = client . runCommand ( command ) ; if ( response . isFailed ( ) ) { throw new RuntimeException ( "Delete batch failed: " + response . getBody ( ) ) ; }
public class FormLayout { /** * Sets the row groups , where each row in such a group gets the same group wide height . Each * group is described by an array of integers that are interpreted as row indices . The parameter * is an array of such group descriptions . < p > * < strong > Examples : < / strong > < pre > * / / Group rows 1 and 2. * setRowGroups ( new int [ ] [ ] { { 1 , 2 } } ) ; * / / Group rows 1 and 2 , and group rows 5 , 7 , and 9. * setRowGroups ( new int [ ] [ ] { { 1 , 2 } , { 5 , 7 , 9 } } ) ; * < / pre > * @ param rowGroupIndices a two - dimensional array of row group indices . * @ throws IndexOutOfBoundsException if an index is outside the grid */ public void setRowGroups ( int [ ] [ ] rowGroupIndices ) { } }
int rowCount = getRowCount ( ) ; boolean [ ] usedIndices = new boolean [ rowCount + 1 ] ; for ( int i = 0 ; i < rowGroupIndices . length ; i ++ ) { for ( int j = 0 ; j < rowGroupIndices [ i ] . length ; j ++ ) { int rowIndex = rowGroupIndices [ i ] [ j ] ; if ( rowIndex < 1 || rowIndex > rowCount ) { throw new IndexOutOfBoundsException ( "Invalid row group index " + rowIndex + " in group " + ( i + 1 ) ) ; } if ( usedIndices [ rowIndex ] ) { throw new IllegalArgumentException ( "Row index " + rowIndex + " must not be used in multiple row groups." ) ; } usedIndices [ rowIndex ] = true ; } } this . rowGroupIndices = deepClone ( rowGroupIndices ) ;
public class Quaternionf { /** * Multiply this quaternion by the quaternion represented via < code > ( qx , qy , qz , qw ) < / code > . * If < code > T < / code > is < code > this < / code > and < code > Q < / code > is the given * quaternion , then the resulting quaternion < code > R < / code > is : * < code > R = T * Q < / code > * So , this method uses post - multiplication like the matrix classes , resulting in a * vector to be transformed by < code > Q < / code > first , and then by < code > T < / code > . * @ param qx * the x component of the quaternion to multiply < code > this < / code > by * @ param qy * the y component of the quaternion to multiply < code > this < / code > by * @ param qz * the z component of the quaternion to multiply < code > this < / code > by * @ param qw * the w component of the quaternion to multiply < code > this < / code > by * @ return this */ public Quaternionf mul ( float qx , float qy , float qz , float qw ) { } }
set ( w * qx + x * qw + y * qz - z * qy , w * qy - x * qz + y * qw + z * qx , w * qz + x * qy - y * qx + z * qw , w * qw - x * qx - y * qy - z * qz ) ; return this ;
public class BlockStateChainingListener { /** * { @ inheritDoc } * @ since 2.5RC1 */ @ Override public void endLink ( ResourceReference reference , boolean freestanding , Map < String , String > parameters ) { } }
super . endLink ( reference , freestanding , parameters ) ; -- this . linkDepth ; this . previousEvent = Event . LINK ;
public class Messenger {

    /**
     * Command for starting web action.
     *
     * Thin delegation to the external module, which owns the actual web-action
     * lifecycle; no state is touched here.
     *
     * @param webAction web action name
     * @return Command for execution
     */
    @ObjectiveCName("startWebAction:")
    public Command<WebActionDescriptor> startWebAction(final String webAction) {
        return modules.getExternalModule().startWebAction(webAction);
    }
}
public class ElasticHashinator {

    /**
     * Returns compressed config bytes.
     *
     * Serializes the token table into a "cooked" layout — a 4-byte count, then
     * the token ints split into four byte planes (most significant plane first),
     * then the partition ids — and gzips the result. Separating tokens from
     * partition ids, and slicing the sorted tokens into byte planes, groups
     * similar bytes together to aid compression.
     *
     * @return gzip-compressed config bytes
     * @throws RuntimeException wrapping the IOException if compression fails
     */
    private byte[] toCookedBytes() {
        // Allocate for an int pair per token/partition ID entry, plus a size.
        ByteBuffer buf = ByteBuffer.allocate(4 + (m_tokenCount * 8));

        buf.putInt(m_tokenCount);

        // Keep tokens and partition ids separate to aid compression.
        // Each off-heap entry is 8 bytes: token int at offset 0, partition id at offset 4.
        for (int zz = 3; zz >= 0; zz--) {
            int lastToken = Integer.MIN_VALUE;
            for (int ii = 0; ii < m_tokenCount; ii++) {
                int token = Bits.unsafe.getInt(m_tokens + (ii * 8));
                // Tokens must be stored in ascending order for this encoding.
                Preconditions.checkArgument(token >= lastToken);
                lastToken = token;
                // Emit byte plane zz of the token (zz=3 is the most significant byte).
                token = token >>> (zz * 8);
                token = token & 0xFF;
                buf.put((byte) token);
            }
        }
        // Partition ids follow, one int per token, read from offset 4 of each entry.
        for (int ii = 0; ii < m_tokenCount; ii++) {
            buf.putInt(Bits.unsafe.getInt(m_tokens + (ii * 8) + 4));
        }

        try {
            return CompressionService.gzipBytes(buf.array());
        } catch (IOException e) {
            throw new RuntimeException("Failed to compress bytes", e);
        }
    }
}
public class DeclarationTransformerImpl { /** * Processes declaration which is supposed to contain one identification * term * @ param < T > * Type of CSSProperty * @ param type * Class of CSSProperty to be stored * @ param d * Declaration to be parsed * @ param properties * Properties map where to store enum * @ return < code > true < / code > in case of success , < code > false < / code > * elsewhere */ protected < T extends CSSProperty > boolean genericOneIdent ( Class < T > type , Declaration d , Map < String , CSSProperty > properties ) { } }
if ( d . size ( ) != 1 ) return false ; return genericTermIdent ( type , d . get ( 0 ) , ALLOW_INH , d . getProperty ( ) , properties ) ;
public class JmsManagedConnectionFactoryImpl { /** * This method is not added in the interface JmsManagedConnectionFactory since it ' s for internal use only . * @ return */ public String getPassword ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getPassword" ) ; String password = jcaConnectionFactory . getPassword ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getPassword" ) ; return password ;
public class HashTreeBuilder { /** * Adds a new leaf with its metadata to the hash tree . * @ param node leaf node to be added , must not be null . * @ param metadata node ' s metadata , must not be null * @ throws HashException * @ throws KSIException */ public void add ( ImprintNode node , IdentityMetadata metadata ) throws HashException , KSIException { } }
addToHeads ( heads , aggregate ( node , metadata ) ) ;
public class Operator {

    /**
     * Compares a Date with a String.
     *
     * Comparison strategy, in order: if the string is numeric, compare it against
     * the date's epoch value; if it parses as a date, compare the two dates;
     * otherwise fall back to a case-insensitive string comparison.
     *
     * @param left the date operand
     * @param right the string operand
     * @return difference as int (negative, zero, or positive)
     * @throws PageException if conversion fails
     */
    public static int compare(Date left, String right) throws PageException {
        // Numeric string: compare against epoch seconds.
        // NOTE(review): getTime()/1000 uses integer division, so sub-second
        // precision is truncated — presumably intentional; confirm.
        if (Decision.isNumber(right)) return compare(left.getTime() / 1000, Caster.toDoubleValue(right));

        // Date-like string: compare both sides at one-second resolution.
        DateTime dt = DateCaster.toDateAdvanced(right, DateCaster.CONVERTING_TYPE_OFFSET, null, null);
        if (dt != null) {
            return compare(left.getTime() / 1000, dt.getTime() / 1000);
        }

        // Fallback: lexical, case-insensitive comparison of the formatted date.
        return Caster.toString(left).compareToIgnoreCase(right);
    }
}
public class ApiOvhIpLoadbalancing { /** * Get this object properties * REST : GET / ipLoadbalancing / { serviceName } / udp / farm / { farmId } / server / { serverId } * @ param serviceName [ required ] The internal name of your IP load balancing * @ param farmId [ required ] Id of your farm * @ param serverId [ required ] Id of your server * API beta */ public OvhBackendUDPServer serviceName_udp_farm_farmId_server_serverId_GET ( String serviceName , Long farmId , Long serverId ) throws IOException { } }
String qPath = "/ipLoadbalancing/{serviceName}/udp/farm/{farmId}/server/{serverId}" ; StringBuilder sb = path ( qPath , serviceName , farmId , serverId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhBackendUDPServer . class ) ;
public class StreamSupport { /** * Reads all bytes from an input stream . * @ param input * @ return * @ throws IOException */ public static byte [ ] absorbInputStream ( InputStream input ) throws IOException { } }
ByteArrayOutputStream output = null ; try { output = new ByteArrayOutputStream ( ) ; absorbInputStream ( input , output ) ; return output . toByteArray ( ) ; } finally { output . close ( ) ; }
public class AbstractTopology { /** * get all the hostIds in the partition group * contain the host ( s ) that have highest partition id * @ return all the hostIds in the partition group */ public Set < Integer > getPartitionGroupPeersContainHighestPid ( ) { } }
// find highest partition int hPid = getPartitionCount ( ) - 1 ; // find the host that contains the highest partition Collection < Integer > hHostIds = getHostIdList ( hPid ) ; if ( hHostIds == null || hHostIds . isEmpty ( ) ) { return Collections . emptySet ( ) ; } int hHostId = hHostIds . iterator ( ) . next ( ) ; return getPartitionGroupPeers ( hHostId ) ;
public class DOMUtils {

    /**
     * Parse the contents of the provided source into an element.
     * This uses the document builder associated with the current thread.
     *
     * Supports {@link StreamSource} (byte stream preferred, then reader),
     * {@link DOMSource} (element or document root), and {@link SAXSource}
     * (re-serialized via an identity transform, then re-parsed).
     *
     * @param source the XML source to convert
     * @return the parsed root element, or null if the source carried no content
     * @throws IOException on parse failure, or wrapping a TransformerException
     *         for SAX sources
     */
    public static Element sourceToElement(Source source) throws IOException {
        Element retElement = null;

        if (source instanceof StreamSource) {
            StreamSource streamSource = (StreamSource) source;

            InputStream ins = streamSource.getInputStream();
            if (ins != null) {
                retElement = DOMUtils.parse(ins);
            }
            // NOTE: if both a stream and a reader are set, the reader's result
            // overwrites the stream's.
            Reader reader = streamSource.getReader();
            if (reader != null) {
                retElement = DOMUtils.parse(new InputSource(reader));
            }
        } else if (source instanceof DOMSource) {
            DOMSource domSource = (DOMSource) source;
            Node node = domSource.getNode();
            if (node instanceof Element) {
                retElement = (Element) node;
            } else if (node instanceof Document) {
                retElement = ((Document) node).getDocumentElement();
            }
        } else if (source instanceof SAXSource) {
            // The fact that JAXBSource derives from SAXSource is an implementation detail.
            // Thus in general applications are strongly discouraged from accessing methods
            // defined on SAXSource. The XMLReader object obtained by the getXMLReader method
            // shall be used only for parsing the InputSource object returned by the
            // getInputSource method.
            final boolean hasInputSource = ((SAXSource) source).getInputSource() != null;
            final boolean hasXMLReader = ((SAXSource) source).getXMLReader() != null;
            if (hasInputSource || hasXMLReader) {
                try {
                    // Identity transform: serialize the SAX source to bytes, then
                    // re-parse those bytes with the thread's document builder.
                    TransformerFactory tf = TransformerFactory.newInstance();
                    ByteArrayOutputStream baos = new ByteArrayOutputStream(1024);
                    Transformer transformer = tf.newTransformer();
                    transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
                    transformer.setOutputProperty(OutputKeys.METHOD, "xml");
                    transformer.transform(source, new StreamResult(baos));
                    retElement = DOMUtils.parse(new ByteArrayInputStream(baos.toByteArray()));
                } catch (TransformerException ex) {
                    throw new IOException(ex);
                }
            }
        } else {
            throw MESSAGES.sourceTypeNotImplemented(source.getClass());
        }

        return retElement;
    }
}
public class Descriptor {

    /**
     * Replace all the message serializers registered with this descriptor with the
     * given message serializers.
     *
     * Descriptors are immutable: a new instance is returned carrying every other
     * field unchanged. Argument order must match the canonical constructor exactly.
     *
     * @param messageSerializers The message serializers to replace the existing ones with.
     * @return A copy of this descriptor with the new message serializers.
     */
    public Descriptor replaceAllMessageSerializers(PMap<Type, MessageSerializer<?, ?>> messageSerializers) {
        return new Descriptor(name, calls, pathParamSerializers, messageSerializers,
            serializerFactory, exceptionSerializer, autoAcl, acls, headerFilter,
            locatableService, circuitBreaker, topicCalls);
    }
}
public class Record { /** * Get value { @ link ArrayWritable } value * @ param label target label * @ return { @ link ArrayWritable } value of the label . If it is not null . */ public ArrayWritable getValueArrayWritable ( String label ) { } }
HadoopObject o = getHadoopObject ( VALUE , label , ObjectUtil . ARRAY , "Array" ) ; if ( o == null ) { return null ; } return ( ArrayWritable ) o . getObject ( ) ;
public class ClassLoaders { /** * Execute the given { @ link Callable } in the { @ link ClassLoader } provided . Return the result , if any . */ public static < T > T executeIn ( ClassLoader loader , Callable < T > task ) throws Exception { } }
if ( task == null ) return null ; if ( log . isLoggable ( Level . FINE ) ) { log . fine ( "ClassLoader [" + loader + "] task began." ) ; } ClassLoader original = SecurityActions . getContextClassLoader ( ) ; try { SecurityActions . setContextClassLoader ( loader ) ; return task . call ( ) ; } finally { SecurityActions . setContextClassLoader ( original ) ; if ( log . isLoggable ( Level . FINE ) ) { log . fine ( "ClassLoader [" + loader + "] task ended." ) ; } }
public class ToStringBuilder {

    /**
     * <p>Uses <code>ReflectionToStringBuilder</code> to generate a
     * <code>toString</code> for the specified object.</p>
     *
     * Delegates with transients as requested, statics excluded, and no
     * reflect-up-to class limit.
     *
     * @param object the Object to be output
     * @param style the style of the <code>toString</code> to create, may be <code>null</code>
     * @param outputTransients whether to include transient fields
     * @return the String result
     * @see ReflectionToStringBuilder#toString(Object, ToStringStyle, boolean)
     */
    @GwtIncompatible("incompatible method")
    public static String reflectionToString(final Object object, final ToStringStyle style, final boolean outputTransients) {
        return ReflectionToStringBuilder.toString(object, style, outputTransients, false, null);
    }
}
public class RepositoryResourceImpl {

    /**
     * {@inheritDoc}
     *
     * Returns the attachments as an unmodifiable view; callers cannot mutate the
     * backing collection through it. Synchronized because the underlying
     * attachment list may be loaded lazily.
     */
    @Override
    public synchronized Collection<AttachmentResource> getAttachments() throws RepositoryBackendException, RepositoryResourceException {
        return Collections.<AttachmentResource>unmodifiableCollection(getAttachmentImpls());
    }
}
public class ConnectedCrud {

    /**
     * Insert value into the db through the specified connection.
     *
     * The insert is wrapped in a transaction; the delegate performs the actual
     * SQL against the connection supplied by the transaction template.
     *
     * @param value the value
     * @throws SQLException if an error occurs
     */
    public void create(final T value) throws SQLException {
        transactionTemplate.doInTransaction(new SQLFunction<Connection, Object>() {
            @Override
            public Object apply(Connection connection) throws SQLException {
                delegate.create(connection, value);
                // No result to propagate; the template requires a return value.
                return null;
            }
        });
    }
}
public class CmsContentEditor { /** * Handles validation changes . < p > * @ param validationContext the changed validation context */ void handleValidationChange ( CmsValidationContext validationContext ) { } }
if ( validationContext . hasValidationErrors ( ) ) { String locales = "" ; for ( String id : validationContext . getInvalidEntityIds ( ) ) { if ( locales . length ( ) > 0 ) { locales += ", " ; } String locale = CmsContentDefinition . getLocaleFromId ( id ) ; if ( m_availableLocales . containsKey ( locale ) ) { locales += m_availableLocales . get ( locale ) ; } } disableSave ( Messages . get ( ) . key ( Messages . GUI_TOOLBAR_VALIDATION_ERRORS_1 , locales ) ) ; } else if ( ! m_changedEntityIds . isEmpty ( ) ) { enableSave ( ) ; }
public class LBiObjBoolPredicateBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T1 , T2 > LBiObjBoolPredicate < T1 , T2 > biObjBoolPredicateFrom ( Consumer < LBiObjBoolPredicateBuilder < T1 , T2 > > buildingFunction ) { } }
LBiObjBoolPredicateBuilder builder = new LBiObjBoolPredicateBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class PageContextImpl {

    /**
     * Installs the given variables scope on this page context and re-derives the
     * active component from it.
     *
     * Order matters: closures are unwrapped to their backing variables first, so
     * the static/component checks below see the real scope. The parameter is
     * deliberately reassigned during unwrapping; the field keeps the original.
     *
     * @param variables the variables scope to install
     */
    @Override
    public void setVariablesScope(Variables variables) {
        this.variables = variables;
        undefinedScope().setVariableScope(variables);

        // Unwrap closure scopes to reach the underlying variables.
        if (variables instanceof ClosureScope) {
            variables = ((ClosureScope) variables).getVariables();
        }

        // Derive the active component from the (possibly unwrapped) scope;
        // plain scopes have no owning component.
        if (variables instanceof StaticScope) {
            activeComponent = ((StaticScope) variables).getComponent();
        }
        else if (variables instanceof ComponentScope) {
            activeComponent = ((ComponentScope) variables).getComponent();
        }
        else {
            activeComponent = null;
        }
    }
}
public class FloatField { /** * Move the physical binary data to this SQL parameter row . * @ param resultset The resultset to get the SQL data from . * @ param iColumn the column in the resultset that has my data . * @ exception SQLException From SQL calls . */ public void moveSQLToField ( ResultSet resultset , int iColumn ) throws SQLException { } }
float fResult = resultset . getFloat ( iColumn ) ; if ( resultset . wasNull ( ) ) this . setString ( Constants . BLANK , false , DBConstants . READ_MOVE ) ; // Null value else { if ( ( ! this . isNullable ( ) ) && ( fResult == Float . NaN ) ) this . setString ( Constants . BLANK , false , DBConstants . READ_MOVE ) ; // Null value else this . setValue ( fResult , false , DBConstants . READ_MOVE ) ; }
public class BeanManager { /** * Initialize references that lack a bean instance eagerly . * @ param beans to be initialized */ public final void initializeReferences ( Collection < Bean > beans ) { } }
Map < BeanId , Bean > indexed = BeanUtils . uniqueIndex ( beans ) ; for ( Bean bean : beans ) { for ( String name : bean . getReferenceNames ( ) ) { List < BeanId > ids = bean . getReference ( name ) ; if ( ids == null ) { continue ; } for ( BeanId id : ids ) { Bean ref = indexed . get ( id ) ; if ( ref == null ) { Optional < Bean > optionalRef = getEager ( id ) ; if ( optionalRef . isPresent ( ) ) { ref = optionalRef . get ( ) ; } } id . setBean ( ref ) ; } } }
public class LobEngine {

    /**
     * Returns a new Blob whose length is zero.
     *
     * Allocates a fresh locator from the sequence, persists an empty stored-lob
     * record with the given block size, and wraps the locator in a Blob handle.
     *
     * @param blockSize block size (in <i>bytes</i>) to use
     * @return new empty Blob
     * @throws PersistException if the stored-lob record cannot be inserted
     */
    public Blob createNewBlob(int blockSize) throws PersistException {
        StoredLob lob = mLobStorage.prepare();
        lob.setLocator(mLocatorSequence.nextLongValue());
        lob.setBlockSize(blockSize);
        lob.setLength(0);
        lob.insert();
        return new BlobImpl(lob.getLocator());
    }
}
public class WindowSpecificationImpl {

    /**
     * Sets the YTWIND attribute and, if required, notifies EMF adapters of the
     * change (old value, new value).
     *
     * NOTE: EMF-generated setter ({@code @generated}); do not hand-edit, it
     * will be overwritten on the next model regeneration.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setYTWIND(Integer newYTWIND) {
        Integer oldYTWIND = ytwind;
        ytwind = newYTWIND;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.WINDOW_SPECIFICATION__YTWIND, oldYTWIND, ytwind));
    }
}
public class MessageProcessor { /** * Get the link change listener that is registered with TRM for changes to * the WLM link groups * @ return LinkChangeListener */ public LinkChangeListener getLinkChangeListener ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getLinkChangeListener" ) ; SibTr . exit ( tc , "getLinkChangeListener" , _linkChangeListener ) ; } return _linkChangeListener ;
public class Jpa2EventStore { /** * { @ inheritDoc } */ @ Override public Collection < Event > findEvents ( SearchCriteria criteria ) { } }
Collection < Event > eventsAllTimestamps = eventRepository . find ( criteria ) ; // timestamp stored as string not queryable in DB , all timestamps come back , still need to filter this subset return findEvents ( criteria , eventsAllTimestamps ) ;
public class TangoCacheManager { /** * Get cache of an attribute * @ param attr * the attribute * @ return the attribute cache * @ throws NoCacheFoundException if cache for the attribute is not found */ public synchronized SelfPopulatingCache getAttributeCache ( final AttributeImpl attr ) throws NoCacheFoundException { } }
if ( attr . getName ( ) . equalsIgnoreCase ( DeviceImpl . STATE_NAME ) ) { return stateCache . getCache ( ) ; } else if ( attr . getName ( ) . equalsIgnoreCase ( DeviceImpl . STATUS_NAME ) ) { return statusCache . getCache ( ) ; } else { return tryGetAttributeCache ( attr ) ; }
public class FamilyBuilderImpl { /** * Add a person as the wife in a family . * @ param family the family * @ param person the person * @ return the wife object */ public Wife addWifeToFamily ( final Family family , final Person person ) { } }
if ( family == null || person == null ) { return new Wife ( ) ; } final FamS famS = new FamS ( person , "FAMS" , new ObjectId ( family . getString ( ) ) ) ; final Wife wife = new Wife ( family , "Wife" , new ObjectId ( person . getString ( ) ) ) ; family . insert ( wife ) ; person . insert ( famS ) ; return wife ;
public class FlowController { /** * Send a Page Flow error to the browser . * @ param errText the error message to display . * @ param response the current HttpServletResponse . */ protected void sendError ( String errText , HttpServletRequest request , HttpServletResponse response ) throws IOException { } }
InternalUtils . sendError ( "PageFlow_Custom_Error" , null , request , response , new Object [ ] { getDisplayName ( ) , errText } ) ;
public class PreorderVisitor { /** * If currently visiting a field , get the field ' s fully qualified name */ public String getFullyQualifiedFieldName ( ) { } }
if ( ! visitingField ) { throw new IllegalStateException ( "getFullyQualifiedFieldName called while not visiting field" ) ; } if ( fullyQualifiedFieldName == null ) { fullyQualifiedFieldName = getDottedClassName ( ) + "." + getFieldName ( ) + " : " + getFieldSig ( ) ; } return fullyQualifiedFieldName ;
public class GenTask {
    /**
     * Merges the specified template using the supplied mapping of keys to objects.
     * Convenience overload: converts the varargs pairs into a map and delegates
     * to the map-based merge.
     *
     * @param data a series of key, value pairs where the keys must be strings and the values can
     *             be any object.
     * @return the merged template text
     * @throws IOException if the template cannot be read or merged
     */
    protected String mergeTemplate(String template, Object... data) throws IOException {
        return mergeTemplate(template, createMap(data));
    }
}
public class MailClient {
    /**
     * Return messages from the INBOX folder as a query result.
     *
     * @param messageNumbers restrict to messages with these message numbers
     * @param uids restrict to messages with these UIDs
     * @param all when true a different (larger) column set is used for the result
     *            — NOTE(review): presumably "include body"; confirm against _fldnew/_flddo
     * @return query over the matching inbox messages
     * @throws MessagingException
     * @throws IOException
     */
    public Query getMails(String[] messageNumbers, String[] uids, boolean all) throws MessagingException, IOException {
        // column set depends on the `all` flag (obfuscated field names _fldnew/_flddo)
        Query qry = new QueryImpl(all ? _fldnew : _flddo, 0, "query");
        Folder folder = _store.getFolder("INBOX");
        folder.open(Folder.READ_ONLY);
        try {
            // startrow / maxrows are instance-level paging fields
            getMessages(qry, folder, uids, messageNumbers, startrow, maxrows, all);
        } finally {
            folder.close(false); // false: do not expunge on close
        }
        return qry;
    }
}
public class MaskFormat {
    /**
     * Formats {@code obj} by applying this mask to its string form and
     * appending the result to {@code toAppendTo}.
     *
     * NOTE(review): returning {@code null} for a null argument deviates from the
     * {@link java.text.Format#format} contract (which never returns null) —
     * confirm existing callers depend on this before changing it.
     *
     * (non-Javadoc)
     * @see java.text.Format#format(java.lang.Object, java.lang.StringBuffer,
     *      java.text.FieldPosition)
     */
    public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) {
        if (obj == null)
            return null;
        return toAppendTo.append(this.format(obj.toString()));
    }
}
public class SearchAttribute { /** * Returns all the search annotations for the attribute . Imports are processed automatically . */ public List < String > getAnnotations ( ) { } }
AnnotationBuilder annotations = new AnnotationBuilder ( ) ; annotations . add ( getFieldAnnotation ( ) , getFieldBridgeAnnotation ( ) , getTikaBridgeAnnotation ( ) ) ; return annotations . getAnnotations ( ) ;
public class AWSElasticsearchClient {
    /**
     * Lists available reserved Elasticsearch instance offerings.
     * Generated SDK operation: runs the standard before-execution handler chain
     * (which may replace the request object) and then dispatches the call.
     *
     * @param request
     *        Container for parameters to <code>DescribeReservedElasticsearchInstanceOfferings</code>
     * @return Result of the DescribeReservedElasticsearchInstanceOfferings operation returned by the service.
     * @throws ResourceNotFoundException
     *         An exception for accessing or deleting a resource that does not exist. Gives http status code of 400.
     * @throws ValidationException
     *         An exception for missing / invalid input fields. Gives http status code of 400.
     * @throws DisabledOperationException
     *         An error occured because the client wanted to access a not supported operation. Gives http status code of
     *         409.
     * @throws InternalException
     *         The request processing has failed because of an unknown error, exception or failure (the failure is
     *         internal to the service). Gives http status code of 500.
     * @sample AWSElasticsearch.DescribeReservedElasticsearchInstanceOfferings
     */
    @Override
    public DescribeReservedElasticsearchInstanceOfferingsResult describeReservedElasticsearchInstanceOfferings(DescribeReservedElasticsearchInstanceOfferingsRequest request) {
        request = beforeClientExecution(request);
        return executeDescribeReservedElasticsearchInstanceOfferings(request);
    }
}
public class ApiOvhMe {
    /**
     * List of service contact change tasks you are involved in.
     * REST: GET /me/task/contactChange
     *
     * @param askingAccount [required] Filter the value of askingAccount property (like)
     * @param state [required] Filter the value of state property (like)
     * @param toAccount [required] Filter the value of toAccount property (like)
     * @return the matching task ids
     * @throws IOException on transport or conversion failure
     */
    public ArrayList<Long> task_contactChange_GET(String askingAccount, net.minidev.ovh.api.nichandle.changecontact.OvhTaskStateEnum state, String toAccount) throws IOException {
        String qPath = "/me/task/contactChange";
        StringBuilder sb = path(qPath);
        // append each filter to the query string via the shared helper
        query(sb, "askingAccount", askingAccount);
        query(sb, "state", state);
        query(sb, "toAccount", toAccount);
        String resp = exec(qPath, "GET", sb.toString(), null);
        // t2: pre-built conversion type token declared elsewhere in this class
        return convertTo(resp, t2);
    }
}
public class UrlPropertyConfigProvider {
    /**
     * getConfig.
     * Builds the effective configuration by layering property sources in
     * increasing precedence: global first, then each local URL in order, then
     * user — later sources overwrite earlier keys in the same Properties object.
     *
     * @return a {@link java.util.Properties} object.
     * @throws RuntimeException wrapping any failure while reading a source
     */
    public Properties getConfig() {
        try {
            Properties properties = new Properties();
            if (null != resources.getGlobal())
                populateConfigItems(properties, resources.getGlobal());
            if (null != resources.getLocals()) {
                for (URL url : resources.getLocals()) {
                    populateConfigItems(properties, url);
                }
            }
            if (null != resources.getUser())
                populateConfigItems(properties, resources.getUser());
            return properties;
        } catch (Exception e) {
            // log before rethrowing so the failure is recorded even if the
            // RuntimeException is swallowed further up the stack
            logger.error("Exception", e);
            throw new RuntimeException(e);
        }
    }
}
public class RequestTable {
    /**
     * Non-blocking alternative to {@link #forEach(Visitor)}: iteration is performed on the array that exists at the
     * time of this call. Changes to the underlying array will not be reflected in the iteration.
     *
     * @param visitor the {@link Visitor}; a null visitor makes this a no-op returning null
     * @return this table for chaining, or null when {@code visitor} is null
     */
    public RequestTable<T> forEachNonBlocking(Visitor<T> visitor) {
        if (visitor == null)
            return null;
        T[] buf;
        long lo, hi;
        // Snapshot the buffer reference and bounds under the lock; the walk
        // below is then lock-free over that snapshot.
        lock.lock();
        try {
            buf = this.buffer;
            lo = this.low;
            hi = this.high;
        } finally {
            lock.unlock();
        }
        // num_iterations caps the walk at one full pass over the ring buffer,
        // even if hi-lo exceeds its capacity.
        for (long i = lo, num_iterations = 0; i < hi && num_iterations < buf.length; i++, num_iterations++) {
            int index = index(i); // map logical position to ring-buffer slot
            T el = buf[index];
            if (!visitor.visit(el)) // visitor returning false aborts iteration
                break;
        }
        return this;
    }
}
public class StringUtil {
    /**
     * Lower-cases the leading character of a name following the JavaBeans
     * convention: {@code FooBarBaz -> fooBarBaz}, while names beginning with
     * two capitals (e.g. {@code URL}) are left untouched by
     * {@link java.beans.Introspector#decapitalize}.
     *
     * @param s the name to decapitalize; may be null
     * @return the decapitalized name, or null when {@code s} is null
     */
    public static String decapitalize(String s) {
        return s == null ? null : Introspector.decapitalize(s);
    }
}
public class StreamAppenderatorDriver {
    /**
     * Execute a task in background to publish all segments corresponding to the given sequence names. The task
     * internally pushes the segments to the deep storage first, and then publishes the metadata to the metadata storage.
     *
     * @param publisher segment publisher
     * @param committer committer
     * @param sequenceNames a collection of sequence names to be published
     * @return a {@link ListenableFuture} for the submitted task which removes published {@code sequenceNames} from
     *         {@code activeSegments} and {@code publishPendingSegments}
     */
    public ListenableFuture<SegmentsAndMetadata> publish(final TransactionalSegmentPublisher publisher, final Committer committer, final Collection<String> sequenceNames) {
        // collect the identifiers of every segment tracked under the given sequences
        final List<SegmentIdWithShardSpec> theSegments = getSegmentWithStates(sequenceNames)
            .map(SegmentWithState::getSegmentIdentifier)
            .collect(Collectors.toList());
        // chain: push to deep storage, then publish the metadata
        final ListenableFuture<SegmentsAndMetadata> publishFuture = ListenableFutures.transformAsync(
            // useUniquePath = true prevents inconsistencies in segment data when task failures or replicas leads to a second
            // version of a segment with the same identifier containing different data; see DataSegmentPusher.push() docs
            pushInBackground(wrapCommitter(committer), theSegments, true),
            sam -> publishInBackground(sam, publisher));
        // after a successful publish, drop the sequences from the tracked map;
        // synchronized on `segments` — presumably mutated concurrently elsewhere, confirm
        return Futures.transform(publishFuture, (Function<? super SegmentsAndMetadata, ? extends SegmentsAndMetadata>) sam -> {
            synchronized (segments) {
                sequenceNames.forEach(segments::remove);
            }
            return sam;
        });
    }
}
public class MiniMax {
    /**
     * Update the entries of the matrices that contain a distance to c, the newly
     * merged cluster.
     *
     * @param size number of ids in the data set
     *             — NOTE(review): unused in this body; confirm whether it can be dropped
     * @param mat matrix paradigm
     * @param prots calculated prototypes
     * @param builder Result builder
     * @param clusters the clusters
     * @param dq distance query of the data set
     * @param c the cluster to update distances to
     */
    protected static <O> void updateMatrices(int size, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<O> dq, int c) {
        final DBIDArrayIter ix = mat.ix, iy = mat.iy;
        // c is the new cluster.
        // Update entries (at (x,y) with x > y) in the matrix where x = c or y = c
        // Update entries at (c,y) with y < c
        ix.seek(c);
        for (iy.seek(0); iy.getOffset() < c; iy.advance()) {
            // Skip entry if already merged
            if (builder.isLinked(iy)) {
                continue;
            }
            updateEntry(mat, prots, clusters, dq, c, iy.getOffset());
        }
        // Update entries at (x,c) with x > c
        iy.seek(c);
        for (ix.seek(c + 1); ix.valid(); ix.advance()) {
            // Skip entry if already merged
            if (builder.isLinked(ix)) {
                continue;
            }
            updateEntry(mat, prots, clusters, dq, ix.getOffset(), c);
        }
    }
}
public class DefaultSensorStorage { /** * Thread safe assuming that each issues for each file are only written once . */ @ Override public void store ( Issue issue ) { } }
if ( issue . primaryLocation ( ) . inputComponent ( ) instanceof DefaultInputFile ) { DefaultInputFile defaultInputFile = ( DefaultInputFile ) issue . primaryLocation ( ) . inputComponent ( ) ; if ( shouldSkipStorage ( defaultInputFile ) ) { return ; } defaultInputFile . setPublished ( true ) ; } moduleIssues . initAndAddIssue ( issue ) ;
public class Manager { /** * Removes a messaging client factory . * WARNING : this method is made available only to be used in non - OSGi environments * ( e . g . Maven , embedded mode , etc ) . If you are not sure , do not use it . * @ param clientFactory a non - null client factory */ public void removeMessagingFactory ( IMessagingClientFactory clientFactory ) { } }
if ( this . messagingClient . getRegistry ( ) != null ) this . messagingClient . getRegistry ( ) . removeMessagingClientFactory ( clientFactory ) ;
public class DocumentationTemplate { /** * Adds a section relating to a { @ link Component } . * @ param component the { @ link Component } the documentation content relates to * @ param title the section title * @ param format the { @ link Format } of the documentation content * @ param content a String containing the documentation content * @ return a documentation { @ link Section } */ public Section addSection ( Component component , String title , Format format , String content ) { } }
return add ( component , title , format , content ) ;
public class DeregisterTargetFromMaintenanceWindowRequestMarshaller {
    /**
     * Marshall the given parameter object.
     * Binds WindowId, WindowTargetId and Safe onto the protocol marshaller;
     * any failure is wrapped in an SdkClientException (generated SDK code).
     *
     * @param deregisterTargetFromMaintenanceWindowRequest the request to marshall; must not be null
     * @param protocolMarshaller receives the bound fields
     * @throws SdkClientException on a null request or any marshalling failure
     */
    public void marshall(DeregisterTargetFromMaintenanceWindowRequest deregisterTargetFromMaintenanceWindowRequest, ProtocolMarshaller protocolMarshaller) {
        if (deregisterTargetFromMaintenanceWindowRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deregisterTargetFromMaintenanceWindowRequest.getWindowId(), WINDOWID_BINDING);
            protocolMarshaller.marshall(deregisterTargetFromMaintenanceWindowRequest.getWindowTargetId(), WINDOWTARGETID_BINDING);
            protocolMarshaller.marshall(deregisterTargetFromMaintenanceWindowRequest.getSafe(), SAFE_BINDING);
        } catch (Exception e) {
            // normalize every marshalling failure to the SDK's client exception type
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MiscUtils { /** * Serialize the deferred serializer data into byte buffer * @ param mbuf ByteBuffer the buffer is written to * @ param ds DeferredSerialization data writes to the byte buffer * @ return size of data * @ throws IOException */ public static int writeDeferredSerialization ( ByteBuffer mbuf , DeferredSerialization ds ) throws IOException { } }
int written = 0 ; try { final int objStartPosition = mbuf . position ( ) ; ds . serialize ( mbuf ) ; written = mbuf . position ( ) - objStartPosition ; } finally { ds . cancel ( ) ; } return written ;
public class SQLPPMapping2DatalogConverter {
    /**
     * Returns a Datalog representation of the mappings.
     * For each triples map the SQL source query is parsed into a rule body and
     * an attribute-to-term lookup table (falling back to an opaque parser view
     * when the SELECT is unsupported); every target atom then becomes one CQIE
     * rule. Errors are accumulated and reported together at the end.
     *
     * @param triplesMaps the mapping axioms to convert
     * @param metadata database metadata used for parsing and identifier quoting
     * @return map from generated rule to the provenance of its mapping assertion
     * @throws InvalidMappingSourceQueriesException if any source query is invalid
     */
    public ImmutableMap<CQIE, PPMappingAssertionProvenance> convert(Collection<SQLPPTriplesMap> triplesMaps, RDBMetadata metadata) throws InvalidMappingSourceQueriesException {
        Map<CQIE, PPMappingAssertionProvenance> mutableMap = new HashMap<>();
        List<String> errorMessages = new ArrayList<>();
        QuotedIDFactory idfac = metadata.getQuotedIDFactory();
        for (SQLPPTriplesMap mappingAxiom : triplesMaps) {
            try {
                OBDASQLQuery sourceQuery = mappingAxiom.getSourceQuery();
                List<Function> body;
                ImmutableMap<QualifiedAttributeID, Term> lookupTable;
                try {
                    // preferred path: fully parse the SELECT into data + filter atoms
                    SelectQueryParser sqp = new SelectQueryParser(metadata, termFactory, typeFactory);
                    RAExpression re = sqp.parse(sourceQuery.toString());
                    lookupTable = re.getAttributes();
                    body = new ArrayList<>(re.getDataAtoms().size() + re.getFilterAtoms().size());
                    body.addAll(re.getDataAtoms());
                    body.addAll(re.getFilterAtoms());
                } catch (UnsupportedSelectQueryException e) {
                    // fallback: wrap the raw query in a parser view, exposing only its attributes
                    ImmutableList<QuotedID> attributes = new SelectQueryAttributeExtractor(metadata, termFactory).extract(sourceQuery.toString());
                    ParserViewDefinition view = metadata.createParserView(sourceQuery.toString(), attributes);
                    // this is required to preserve the order of the variables
                    ImmutableList<Map.Entry<QualifiedAttributeID, Variable>> list = view.getAttributes().stream()
                            .map(att -> new AbstractMap.SimpleEntry<>(
                                    new QualifiedAttributeID(null, att.getID()), // strip off the ParserViewDefinitionName
                                    termFactory.getVariable(att.getID().getName())))
                            .collect(ImmutableCollectors.toList());
                    lookupTable = list.stream().collect(ImmutableCollectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
                    List<Term> arguments = list.stream().map(Map.Entry::getValue).collect(ImmutableCollectors.toList());
                    body = new ArrayList<>(1);
                    body.add(termFactory.getFunction(view.getAtomPredicate(), arguments));
                }
                // one CQIE rule per target atom, all sharing the same body
                for (TargetAtom atom : mappingAxiom.getTargetAtoms()) {
                    PPMappingAssertionProvenance provenance = mappingAxiom.getMappingAssertionProvenance(atom);
                    try {
                        Function mergedAtom = immutabilityTools.convertToMutableFunction(atom.getProjectionAtom().getPredicate(), atom.getSubstitutedTerms());
                        Function head = renameVariables(mergedAtom, lookupTable, idfac);
                        CQIE rule = datalogFactory.getCQIE(head, body);
                        // duplicate rules are kept once; the clash is only warned about
                        PPMappingAssertionProvenance previous = mutableMap.put(rule, provenance);
                        if (previous != null)
                            LOGGER.warn("Redundant triples maps: \n" + provenance + "\n and \n" + previous);
                    } catch (AttributeNotFoundException e) {
                        errorMessages.add("Error: " + e.getMessage() + " \nProblem location: source query of the mapping assertion \n[" + provenance.getProvenanceInfo() + "]");
                    }
                }
            } catch (InvalidSelectQueryException e) {
                errorMessages.add("Error: " + e.getMessage() + " \nProblem location: source query of triplesMap \n[" + mappingAxiom.getTriplesMapProvenance().getProvenanceInfo() + "]");
            }
        }
        // report all accumulated problems in one exception
        if (!errorMessages.isEmpty())
            throw new InvalidMappingSourceQueriesException(Joiner.on("\n\n").join(errorMessages));
        return ImmutableMap.copyOf(mutableMap);
    }
}
public class Intersectiond {
    /**
     * Test whether the ray with given origin <code>(originX, originY, originZ)</code> and direction
     * <code>(dirX, dirY, dirZ)</code> intersects the plane given as the general plane equation
     * <i>a*x + b*y + c*z + d = 0</i>, and return the value of the parameter <i>t</i> in the ray
     * equation <i>p(t) = origin + t * dir</i> of the intersection point.
     * This method returns <code>-1.0</code> if the ray does not intersect the plane, because it is
     * either parallel to the plane or its direction points away from the plane or the ray's origin is
     * on the <i>negative</i> side of the plane (i.e. the plane's normal points away from the ray's origin).
     * Reference: <a href="https://www.siggraph.org/education/materials/HyperGraph/raytrace/rayplane_intersection.htm">https://www.siggraph.org/</a>
     *
     * @param originX the x coordinate of the ray's origin
     * @param originY the y coordinate of the ray's origin
     * @param originZ the z coordinate of the ray's origin
     * @param dirX the x coordinate of the ray's direction
     * @param dirY the y coordinate of the ray's direction
     * @param dirZ the z coordinate of the ray's direction
     * @param a the x factor in the plane equation
     * @param b the y factor in the plane equation
     * @param c the z factor in the plane equation
     * @param d the constant in the plane equation
     * @param epsilon some small epsilon for when the ray is parallel to the plane
     *                — NOTE(review): not referenced in this body; confirm intent
     * @return the value of the parameter <i>t</i> of the intersection point if the ray intersects
     *         the plane; <code>-1.0</code> otherwise
     */
    public static double intersectRayPlane(double originX, double originY, double originZ, double dirX, double dirY, double dirZ, double a, double b, double c, double d, double epsilon) {
        // Dot product of the plane normal with the ray direction; only a strictly
        // negative value means the ray heads toward the plane's front face.
        final double denom = a * dirX + b * dirY + c * dirZ;
        if (denom >= 0.0)
            return -1.0;
        final double t = -(a * originX + b * originY + c * originZ + d) / denom;
        return t >= 0.0 ? t : -1.0;
    }
}
public class FamFamFlags { /** * Get the flag icon from the passed locale or < code > null < / code > . * @ param aFlagLocale * The locale to resolve . May be < code > null < / code > . * @ return < code > null < / code > if the passed locale is < code > null < / code > , if the * locale has no country or if the no flag is present for the passed * locale . */ @ Nullable public static IHCNode getFlagNodeFromLocale ( @ Nullable final Locale aFlagLocale ) { } }
final EFamFamFlagIcon eIcon = getFlagFromLocale ( aFlagLocale ) ; return eIcon == null ? null : eIcon . getAsNode ( ) ;
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the EClass for IfcModulusOfSubgradeReactionMeasure
     * from the globally registered Ifc4 package (classifier index 841).
     * (EMF-generated accessor — regenerated code, do not hand-edit logic.)
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcModulusOfSubgradeReactionMeasure() {
        if (ifcModulusOfSubgradeReactionMeasureEClass == null) {
            ifcModulusOfSubgradeReactionMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(841);
        }
        return ifcModulusOfSubgradeReactionMeasureEClass;
    }
}
public class DescribeSuggestersRequest { /** * The suggesters you want to describe . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSuggesterNames ( java . util . Collection ) } or { @ link # withSuggesterNames ( java . util . Collection ) } if you want * to override the existing values . * @ param suggesterNames * The suggesters you want to describe . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeSuggestersRequest withSuggesterNames ( String ... suggesterNames ) { } }
if ( this . suggesterNames == null ) { setSuggesterNames ( new com . amazonaws . internal . SdkInternalList < String > ( suggesterNames . length ) ) ; } for ( String ele : suggesterNames ) { this . suggesterNames . add ( ele ) ; } return this ;
public class ContainerAliasResolver { /** * Looks up container id for given alias that is associated with task instance * @ param alias container alias * @ param taskId unique task instance id * @ return * @ throws IllegalArgumentException in case there are no containers for given alias */ public String forTaskInstance ( String alias , long taskId ) { } }
return registry . getContainerId ( alias , new ByTaskIdContainerLocator ( taskId ) ) ;
public class BundleResource { /** * / * ( non - Javadoc ) * @ see com . ibm . jaggr . service . resource . IResource # lastModified ( ) */ @ Override public long lastModified ( ) { } }
long lastmod = 0L ; try { lastmod = getURI ( ) . toURL ( ) . openConnection ( ) . getLastModified ( ) ; } catch ( IOException ignore ) { } return lastmod ;
public class Model { /** * Get attribute of mysql type : float */ public Float getFloat ( String attr ) { } }
Number n = ( Number ) attrs . get ( attr ) ; return n != null ? n . floatValue ( ) : null ;