signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Update {

    /**
     * Performs the update without calling triggers and without any check of
     * access rights.
     *
     * @throws EFapsException if the update is not possible (unique key
     *             violation, the object does not exist, etc.)
     */
    public void executeWithoutTrigger()
        throws EFapsException
    {
        // only proceed when no invalid status values were collected
        if (Update.STATUSOK.getStati().isEmpty()) {
            final Context context = Context.getThreadContext();
            ConnectionResource con = null;
            try {
                con = context.getConnectionResource();
                // one SQL UPDATE statement per underlying SQL table
                for (final Entry<SQLTable, List<Value>> entry : this.table2values.entrySet()) {
                    final SQLUpdate update = Context.getDbType().newUpdate(entry.getKey().getSqlTable(),
                                    entry.getKey().getSqlColId(), getInstance().getId());
                    // iterate in reverse order and only execute the ones that are not added yet, permitting
                    // to overwrite the value for attributes by adding them later
                    final ReverseListIterator<Value> iterator = new ReverseListIterator<>(entry.getValue());
                    final Set<String> added = new HashSet<>();
                    while (iterator.hasNext()) {
                        final Value value = iterator.next();
                        // the column-name list serves as the de-duplication key
                        final String colKey = value.getAttribute().getSqlColNames().toString();
                        if (!added.contains(colKey)) {
                            value.getAttribute().prepareDBUpdate(update, value.getValues());
                            added.add(colKey);
                        }
                    }
                    final Set<String> updatedColumns = update.execute(con);
                    // remove trigger-relevant attributes whose columns were not actually
                    // touched by this statement, so later trigger evaluation only sees
                    // attributes that really changed
                    final Iterator<Entry<Attribute, Value>> attrIter =
                                    this.trigRelevantAttr2values.entrySet().iterator();
                    while (attrIter.hasNext()) {
                        final Entry<Attribute, Value> trigRelEntry = attrIter.next();
                        if (trigRelEntry.getKey().getTable().equals(entry.getKey())) {
                            boolean updated = false;
                            for (final String colName : trigRelEntry.getKey().getSqlColNames()) {
                                if (updatedColumns.contains(colName)) {
                                    updated = true;
                                    break;
                                }
                            }
                            if (!updated) {
                                attrIter.remove();
                            }
                        }
                    }
                }
                // invalidate caches that depend on this instance
                AccessCache.registerUpdate(getInstance());
                Queue.registerUpdate(getInstance());
            } catch (final SQLException e) {
                Update.LOG.error("Update of '" + this.instance + "' not possible", e);
                throw new EFapsException(getClass(), "executeWithoutTrigger.SQLException", e, this.instance);
            }
        } else {
            throw new EFapsException(getClass(), "executeWithout.StatusInvalid", Update.STATUSOK.getStati());
        }
    }
}
public class KNXnetIPDevMgmt {

    /**
     * Handles an incoming KNXnet/IP frame for this device-management connection.
     * Only {@code DEVICE_CONFIGURATION_REQ} frames on our channel with the
     * expected receive-sequence number are processed; everything else is
     * acknowledged/logged and ignored.
     *
     * @see tuwien.auto.calimero.knxnetip.ConnectionImpl#handleService
     *      (tuwien.auto.calimero.knxnetip.servicetype.KNXnetIPHeader, byte[], int)
     */
    void handleService(KNXnetIPHeader h, byte[] data, int offset)
        throws KNXFormatException, IOException
    {
        final int svc = h.getServiceType();
        if (svc == KNXnetIPHeader.DEVICE_CONFIGURATION_REQ) {
            ServiceRequest req;
            try {
                req = PacketHelper.getServiceRequest(h, data, offset);
            } catch (final KNXFormatException e) {
                // try to get at least the connection header of the service request
                req = PacketHelper.getEmptyServiceRequest(h, data, offset);
                final byte[] junk = new byte[h.getTotalLength() - h.getStructLength() - 4];
                System.arraycopy(data, offset + 4, junk, 0, junk.length);
                logger.warn("received dev.mgmt request with unknown cEMI part "
                        + DataUnitBuilder.toHex(junk, " "), e);
            }
            final short seq = req.getSequenceNumber();
            if (req.getChannelID() == getChannelID() && seq == getSeqNoRcv()) {
                // only protocol version 1.0 is supported; other versions are
                // acknowledged with an error status and the connection is closed
                final short status = h.getVersion() == KNXNETIP_VERSION_10
                        ? ErrorCodes.NO_ERROR : ErrorCodes.VERSION_NOT_SUPPORTED;
                final byte[] buf = PacketHelper.toPacket(new ServiceAck(
                        KNXnetIPHeader.DEVICE_CONFIGURATION_ACK, getChannelID(), seq, status));
                final DatagramPacket p = new DatagramPacket(buf, buf.length,
                        dataEP.getAddress(), dataEP.getPort());
                socket.send(p);
                incSeqNoRcv();
                if (status == ErrorCodes.VERSION_NOT_SUPPORTED) {
                    close(ConnectionCloseEvent.INTERNAL, "protocol version changed",
                            LogLevel.ERROR, null);
                    return;
                }
                final CEMI cemi = req.getCEMI();
                // leave if we are working with an empty (broken) service request
                if (cemi == null)
                    return;
                final short mc = cemi.getMessageCode();
                if (mc == CEMIDevMgmt.MC_PROPINFO_IND || mc == CEMIDevMgmt.MC_RESET_IND)
                    fireFrameReceived(cemi);
                else if (mc == CEMIDevMgmt.MC_PROPREAD_CON || mc == CEMIDevMgmt.MC_PROPWRITE_CON) {
                    // invariant: notify listener before return from blocking send
                    fireFrameReceived(cemi);
                    setStateNotify(OK);
                }
            }
            else
                logger.warn("received dev.mgmt request channel-ID " + req.getChannelID()
                        + ", receive-sequence " + seq + ", expected " + getSeqNoRcv() + " - ignored");
        }
        else
            logger.warn("received unknown frame (service type 0x" + Integer.toHexString(svc)
                    + ") - ignored");
    }
}
public class MemberMethod { /** * { @ inheritDoc } */ @ Override public Type getGenericType ( ) { } }
Type resultType ; if ( CheckUtil . checkBounds ( this . param , this . method . getGenericParameterTypes ( ) ) ) { resultType = this . method . getGenericParameterTypes ( ) [ this . param ] ; } else { resultType = this . method . getGenericReturnType ( ) ; } return resultType ;
public class OqlBuilder { /** * from . * @ param from a { @ link java . lang . String } object . * @ param < E > a E object . * @ return a { @ link org . beangle . commons . dao . query . builder . OqlBuilder } object . */ public static < E > OqlBuilder < E > from ( final String from ) { } }
OqlBuilder < E > query = new OqlBuilder < E > ( ) ; query . newFrom ( from ) ; return query ;
public class Widget { /** * Add another { @ link Widget } as a child of this one . * A { @ link GVRSceneObject } other than the one directly managed by the child * { @ code Widget } can be specified as the child ' s root . This is useful in * cases where the parent object needs to insert additional scene objects * between the child and its parent . * < b > NOTE : < / b > it is the responsibility of the caller to keep track of the * relationship between the child { @ code Widget } and the alternative root * scene object . * @ param child * The { @ code Widget } to add as a child . * @ param childRootSceneObject * The root { @ link GVRSceneObject } of the child . * @ param index * Position at which to add the child . Pass - 1 to add at end . * @ param preventLayout * The { @ code Widget } whether to call layout ( ) . * @ return { @ code True } if { @ code child } was added ; { @ code false } if * { @ code child } was previously added to this instance . */ protected boolean addChild ( final Widget child , final GVRSceneObject childRootSceneObject , final int index , boolean preventLayout ) { } }
final boolean added = addChildInner ( child , childRootSceneObject , index ) ; Log . d ( Log . SUBSYSTEM . WIDGET , TAG , "addChild [%s] %b" , child , added ) ; if ( added ) { onTransformChanged ( ) ; if ( ! preventLayout ) { invalidateAllLayouts ( ) ; requestLayout ( ) ; } } return added ;
public class JsonUtil { /** * Returns a field in a Json object as a boolean . * Throws IllegalArgumentException if the field value is null . * @ param object the Json Object * @ param field the field in the Json object to return * @ return the Json field value as a boolean */ public static boolean getBoolean ( JsonObject object , String field ) { } }
final JsonValue value = object . get ( field ) ; throwExceptionIfNull ( value , field ) ; return value . asBoolean ( ) ;
public class CloudInstanceController { /** * Cloud Instance Endpoints */ @ RequestMapping ( value = "/cloud/instance/refresh" , method = POST , consumes = APPLICATION_JSON_VALUE , produces = APPLICATION_JSON_VALUE ) public ResponseEntity < Collection < String > > refreshInstances ( @ Valid @ RequestBody ( required = false ) CloudInstanceListRefreshRequest request ) { } }
return ResponseEntity . ok ( ) . body ( cloudInstanceService . refreshInstances ( request ) ) ;
public class CompressUtil {

    /**
     * Compresses source files/directories into a tar archive and bzip2-compresses
     * the result to the target resource. The tar archive is built fully in
     * memory before compression.
     *
     * @param sources the files/directories to archive
     * @param target the resource receiving the compressed output
     * @param mode the file mode used for the tar entries
     * @throws IOException if archiving or compressing fails
     */
    private static void compressTBZ2(Resource[] sources, Resource target, int mode)
            throws IOException {
        // stage the tar archive in memory, then bzip2 it to the target stream
        final ByteArrayOutputStream tarBuffer = new ByteArrayOutputStream();
        compressTar(sources, tarBuffer, mode);
        _compressBZip2(new ByteArrayInputStream(tarBuffer.toByteArray()), target.getOutputStream());
    }
}
public class RequestXmlFactory { /** * Converts the specified list of PartETags to an XML fragment that can be * sent to the CompleteMultipartUpload operation of Amazon S3. * @ param partETags * The list of part ETags containing the data to include in the * new XML fragment . * @ return A byte array containing the data */ public static byte [ ] convertToXmlByteArray ( List < PartETag > partETags ) { } }
XmlWriter xml = new XmlWriter ( ) ; xml . start ( "CompleteMultipartUpload" ) ; if ( partETags != null ) { List < PartETag > sortedPartETags = new ArrayList < PartETag > ( partETags ) ; Collections . sort ( sortedPartETags , new Comparator < PartETag > ( ) { public int compare ( PartETag tag1 , PartETag tag2 ) { if ( tag1 . getPartNumber ( ) < tag2 . getPartNumber ( ) ) return - 1 ; if ( tag1 . getPartNumber ( ) > tag2 . getPartNumber ( ) ) return 1 ; return 0 ; } } ) ; for ( PartETag partEtag : sortedPartETags ) { xml . start ( "Part" ) ; xml . start ( "PartNumber" ) . value ( Integer . toString ( partEtag . getPartNumber ( ) ) ) . end ( ) ; xml . start ( "ETag" ) . value ( partEtag . getETag ( ) ) . end ( ) ; xml . end ( ) ; } } xml . end ( ) ; return xml . getBytes ( ) ;
public class SessionNode { /** * Get the segment for this node . * @ param cache the cache * @ param parent the parent node * @ return the segment * @ throws NodeNotFoundInParentException if the node doesn ' t exist in the referenced parent */ protected final Segment getSegment ( NodeCache cache , CachedNode parent ) { } }
if ( parent != null ) { ChildReference ref = parent . getChildReferences ( cache ) . getChild ( key ) ; if ( ref == null ) { // This node doesn ' t exist in the parent throw new NodeNotFoundInParentException ( key , parent . getKey ( ) ) ; } return ref . getSegment ( ) ; } // This is the root node . . . return workspace ( cache ) . childReferenceForRoot ( ) . getSegment ( ) ;
public class CmsWorkplace { /** * Generates a variable button bar separator line . < p > * @ param leftPixel the amount of pixel left to the line * @ param rightPixel the amount of pixel right to the line * @ param className the css class name for the formatting * @ return a variable button bar separator line */ public String buttonBarLine ( int leftPixel , int rightPixel , String className ) { } }
StringBuffer result = new StringBuffer ( 512 ) ; if ( leftPixel > 0 ) { result . append ( buttonBarLineSpacer ( leftPixel ) ) ; } result . append ( "<td><span class=\"" ) ; result . append ( className ) ; result . append ( "\"></span></td>\n" ) ; if ( rightPixel > 0 ) { result . append ( buttonBarLineSpacer ( rightPixel ) ) ; } return result . toString ( ) ;
public class RedBlackTreeInteger {

    /**
     * Removes the given key. The key MUST exist, else the tree won't be valid
     * anymore.
     *
     * @param key the key to remove
     */
    public void removeKey(int key)
    {
        // fast paths: minimum and maximum are tracked separately and have
        // dedicated removal routines
        if (key == first.value) {
            removeMin();
            return;
        }
        if (key == last.value) {
            removeMax();
            return;
        }
        // if both children of root are black, set root to red
        // (standard precondition for the recursive red-black delete)
        if ((root.left == null || !root.left.red) && (root.right == null || !root.right.red))
            root.red = true;
        root = removeKey(root, key);
        // restore the invariant that the root is always black
        if (root != null)
            root.red = false;
    }
}
public class AntPathMatcher { /** * Tests whether or not a string matches against a pattern . The pattern may contain two special characters : < br > ' * ' * means zero or more characters < br > ' ? ' means one and only one character * @ param pattern pattern to match against . Must not be < code > null < / code > . * @ param str string which must be matched against the pattern . Must not be < code > null < / code > . * @ return < code > true < / code > if the string matches against the pattern , or < code > false < / code > otherwise . */ private boolean matchStrings ( String pattern , String str , Map < String , String > uriTemplateVariables ) { } }
AntPathStringMatcher matcher = new AntPathStringMatcher ( pattern , str , uriTemplateVariables ) ; return matcher . matchStrings ( ) ;
public class ConfigClient { /** * Deletes an exclusion . * < p > Sample code : * < pre > < code > * try ( ConfigClient configClient = ConfigClient . create ( ) ) { * ExclusionName name = ProjectExclusionName . of ( " [ PROJECT ] " , " [ EXCLUSION ] " ) ; * configClient . deleteExclusion ( name ) ; * < / code > < / pre > * @ param name Required . The resource name of an existing exclusion to delete : * < p > " projects / [ PROJECT _ ID ] / exclusions / [ EXCLUSION _ ID ] " * " organizations / [ ORGANIZATION _ ID ] / exclusions / [ EXCLUSION _ ID ] " * " billingAccounts / [ BILLING _ ACCOUNT _ ID ] / exclusions / [ EXCLUSION _ ID ] " * " folders / [ FOLDER _ ID ] / exclusions / [ EXCLUSION _ ID ] " * < p > Example : ` " projects / my - project - id / exclusions / my - exclusion - id " ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final void deleteExclusion ( ExclusionName name ) { } }
DeleteExclusionRequest request = DeleteExclusionRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; deleteExclusion ( request ) ;
public class CxxAstScanner { /** * Helper method for scanning a single file * @ param file is the file to be checked * @ param cxxConfig the plugin configuration * @ param visitors AST checks and visitors to use * @ param language for sensor * @ return file checked with measures and issues */ public static SourceFile scanSingleFileConfig ( CxxLanguage language , InputFile file , CxxConfiguration cxxConfig , SquidAstVisitor < Grammar > ... visitors ) { } }
if ( ! file . isFile ( ) ) { throw new IllegalArgumentException ( "File '" + file + "' not found." ) ; } AstScanner < Grammar > scanner = create ( language , cxxConfig , visitors ) ; scanner . scanFile ( file . file ( ) ) ; Collection < SourceCode > sources = scanner . getIndex ( ) . search ( new QueryByType ( SourceFile . class ) ) ; if ( sources . size ( ) != 1 ) { throw new IllegalStateException ( "Only one SourceFile was expected whereas " + sources . size ( ) + " has been returned." ) ; } return ( SourceFile ) sources . iterator ( ) . next ( ) ;
public class AbstractAggregatorImpl { /** * Returns the content type string for the specified filename based on the filename ' s extension . * Uses use both MimetypeFileTypeMap and FileNameMap ( from URLConnection ) because * MimetypeFileTypeMap provides a convenient way for the application to add new mappings , but * doesn ' t provide any default mappings , while FileNameMap , from URLConnection , provides default * mappings ( via < java _ home > / lib / content - types . properties ) but without easy extensibility . * @ param filename * the file name for which the content type is desired * @ return the content type ( mime part ) string , or application / octet - stream if no match is * found . */ protected String getContentType ( String filename ) { } }
String contentType = DEFAULT_CONTENT_TYPE ; if ( fileTypeMap != null ) { contentType = fileTypeMap . getContentType ( filename ) ; } if ( DEFAULT_CONTENT_TYPE . equals ( contentType ) && fileNameMap != null ) { String test = fileNameMap . getContentTypeFor ( filename ) ; if ( test != null ) { contentType = test ; } } return contentType ;
public class AbstractRuleImpl {

    /**
     * <!-- begin-user-doc -->
     * Resets the feature identified by {@code featureID} to its default value;
     * unknown feature IDs are delegated to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID)
    {
        switch (featureID) {
            case XtextPackage.ABSTRACT_RULE__NAME:
                setName(NAME_EDEFAULT);
                return;
            case XtextPackage.ABSTRACT_RULE__TYPE:
                setType((TypeRef) null);
                return;
            case XtextPackage.ABSTRACT_RULE__ALTERNATIVES:
                setAlternatives((AbstractElement) null);
                return;
            case XtextPackage.ABSTRACT_RULE__ANNOTATIONS:
                getAnnotations().clear();
                return;
        }
        super.eUnset(featureID);
    }
}
public class SessionEntityTypesClient { /** * Creates a session entity type . * < p > If the specified session entity type already exists , overrides the session entity type . * < p > Sample code : * < pre > < code > * try ( SessionEntityTypesClient sessionEntityTypesClient = SessionEntityTypesClient . create ( ) ) { * SessionName parent = SessionName . of ( " [ PROJECT ] " , " [ SESSION ] " ) ; * SessionEntityType sessionEntityType = SessionEntityType . newBuilder ( ) . build ( ) ; * SessionEntityType response = sessionEntityTypesClient . createSessionEntityType ( parent , sessionEntityType ) ; * < / code > < / pre > * @ param parent Required . The session to create a session entity type for . Format : * ` projects / & lt ; Project ID & gt ; / agent / sessions / & lt ; Session ID & gt ; ` or ` projects / & lt ; Project * ID & gt ; / agent / environments / & lt ; Environment ID & gt ; / users / & lt ; User ID & gt ; / * sessions / & lt ; Session ID & gt ; ` . If ` Environment ID ` is not specified , we assume default * ' draft ' environment . If ` User ID ` is not specified , we assume default ' - ' user . * @ param sessionEntityType Required . The session entity type to create . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final SessionEntityType createSessionEntityType ( SessionName parent , SessionEntityType sessionEntityType ) { } }
CreateSessionEntityTypeRequest request = CreateSessionEntityTypeRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . setSessionEntityType ( sessionEntityType ) . build ( ) ; return createSessionEntityType ( request ) ;
public class HessianProxyFactory { /** * JNDI object factory so the proxy can be used as a resource . */ public Object getObjectInstance ( Object obj , Name name , Context nameCtx , Hashtable < ? , ? > environment ) throws Exception { } }
Reference ref = ( Reference ) obj ; String api = null ; String url = null ; for ( int i = 0 ; i < ref . size ( ) ; i ++ ) { RefAddr addr = ref . get ( i ) ; String type = addr . getType ( ) ; String value = ( String ) addr . getContent ( ) ; if ( type . equals ( "type" ) ) api = value ; else if ( type . equals ( "url" ) ) url = value ; else if ( type . equals ( "user" ) ) setUser ( value ) ; else if ( type . equals ( "password" ) ) setPassword ( value ) ; } if ( url == null ) throw new NamingException ( "`url' must be configured for HessianProxyFactory." ) ; // XXX : could use meta protocol to grab this if ( api == null ) throw new NamingException ( "`type' must be configured for HessianProxyFactory." ) ; Class apiClass = Class . forName ( api , false , _loader ) ; return create ( apiClass , url ) ;
public class ASTHelper { /** * Creates a { @ link FieldDeclaration } . * @ param modifiers * modifiers * @ param type * type * @ param variable * variable declarator * @ return instance of { @ link FieldDeclaration } */ public static FieldDeclaration createFieldDeclaration ( int modifiers , Type type , VariableDeclarator variable ) { } }
List < VariableDeclarator > variables = new ArrayList < VariableDeclarator > ( ) ; variables . add ( variable ) ; FieldDeclaration ret = new FieldDeclaration ( modifiers , type , variables ) ; return ret ;
public class DevicesInner { /** * Uploads registration certificate for the device . * @ param deviceName The device name . * @ param resourceGroupName The resource group name . * @ param parameters The upload certificate request . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < UploadCertificateResponseInner > uploadCertificateAsync ( String deviceName , String resourceGroupName , UploadCertificateRequest parameters , final ServiceCallback < UploadCertificateResponseInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( uploadCertificateWithServiceResponseAsync ( deviceName , resourceGroupName , parameters ) , serviceCallback ) ;
public class UACRegisterSipServlet {

    /**
     * Sends an initial REGISTER request as soon as the servlet is initialized,
     * using {@code user.name} and {@code domain.name} from the servlet context
     * init parameters.
     *
     * @see javax.servlet.sip.SipServletListener#servletInitialized(javax.servlet.sip.SipServletContextEvent)
     */
    public void servletInitialized(SipServletContextEvent ce) {
        ServletContext servletContext = ce.getServletContext();
        SipFactory sipFactory = (SipFactory) servletContext.getAttribute(SIP_FACTORY);
        String userName = servletContext.getInitParameter("user.name");
        String domainName = servletContext.getInitParameter("domain.name");
        SipApplicationSession sipApplicationSession = sipFactory.createApplicationSession();
        // for REGISTER the from and the to headers are one and the same
        SipURI fromToURI = sipFactory.createSipURI(userName, domainName);
        SipServletRequest sipServletRequest = sipFactory.createRequest(
                sipApplicationSession, "REGISTER", fromToURI, fromToURI);
        sipServletRequest.setHeader("Expires", "3600");
        sipServletRequest.setHeader("User-Agent", "MobicentsSipServlets");
        // the request URI for REGISTER carries only the domain, no user part
        SipURI requestURI = sipFactory.createSipURI(null, domainName);
        try {
            // NOTE(review): the Contact header's expires parameter is forced to 0
            // while the Expires header above is 3600 — looks intentional for this
            // test servlet, but confirm against the intended registration lifetime
            Parameterable parameterable = sipServletRequest.getParameterableHeader("Contact");
            parameterable.setParameter("expires", "0");
        } catch (ServletParseException e1) {
            logger.error("Impossible to set the expires on the contact header", e1);
        }
        try {
            sipServletRequest.setRequestURI(requestURI);
            sipServletRequest.send();
        } catch (IOException e) {
            logger.error("An unexpected exception occured while sending the REGISTER request", e);
        }
    }
}
public class MonitorMethodInterceptorDefinitionDecorator { /** * Register a new PerformanceMonitor with Spring if it does not already exist . * @ param beanName The name of the bean that this performance monitor is wrapped around * @ param registry The registry where all the spring beans are registered */ private void registerPerformanceMonitor ( String beanName , BeanDefinitionRegistry registry ) { } }
String perfMonitorName = beanName + "PerformanceMonitor" ; if ( ! registry . containsBeanDefinition ( perfMonitorName ) ) { BeanDefinitionBuilder initializer = BeanDefinitionBuilder . rootBeanDefinition ( PerformanceMonitorBean . class ) ; registry . registerBeanDefinition ( perfMonitorName , initializer . getBeanDefinition ( ) ) ; }
public class BaseGridTableScreen {

    /**
     * Create a data entry screen with this main record
     * (null means use this screen's grid record).
     *
     * @param record the main record for the new form
     * @param iDocMode the document type of the new form
     * @param bReadCurrentRecord sync the new screen with my current record?
     * @param bUseSameWindow use the same window?
     * @param properties extra properties for the new form
     * @return the new form panel if successful
     */
    public BasePanel onForm(Record record, int iDocMode, boolean bReadCurrentRecord,
            int bUseSameWindow, Map<String, Object> properties)
    {
        if (record == null)
            record = this.getMainRecord();
        if (record == this.getMainRecord()) {
            try {
                int iSelection = this.getScreenFieldView().getSelectedRow();
                if (iSelection != -1) {
                    Record recAtTarget = null;
                    this.finalizeThisScreen(); // Validate current control, update record, get ready to close screen.
                    recAtTarget = (Record) ((GridTable) record.getTable()).get(iSelection);
                    // when there is no record at the selected row yet, and we are
                    // opening in detail mode while adding a modified record, persist
                    // it first so the target screen can re-read it by bookmark
                    if (recAtTarget == null)
                        if (record != null)
                            if ((iDocMode & ScreenConstants.DETAIL_MODE) == ScreenConstants.DETAIL_MODE)
                                if (record.getEditMode() == DBConstants.EDIT_ADD)
                                    if (record.isModified()) {
                                        record.add();
                                        Object bookmark = record.getLastModified(DBConstants.BOOKMARK_HANDLE);
                                        recAtTarget = record.setHandle(bookmark, DBConstants.BOOKMARK_HANDLE);
                                    }
                }
                // NOTE: this else belongs to "if (iSelection != -1)" — with no row
                // selected, start a brand-new record instead
                else
                    record.addNew();
            } catch (DBException ex) {
                ex.printStackTrace();
            }
        }
        return super.onForm(record, iDocMode, bReadCurrentRecord, bUseSameWindow, properties);
    }
}
public class BootstrapProgressBar { /** * Starts an infinite animation cycle which provides the visual effect of stripes moving * backwards . The current system time is used to offset tiled bitmaps of the progress background , * producing the effect that the stripes are moving backwards . */ private void startStripedAnimationIfNeeded ( ) { } }
if ( ! striped || ! animated ) { return ; } clearAnimation ( ) ; progressAnimator = ValueAnimator . ofFloat ( 0 , 0 ) ; progressAnimator . setDuration ( UPDATE_ANIM_MS ) ; progressAnimator . setRepeatCount ( ValueAnimator . INFINITE ) ; progressAnimator . setRepeatMode ( ValueAnimator . RESTART ) ; progressAnimator . setInterpolator ( new LinearInterpolator ( ) ) ; progressAnimator . addUpdateListener ( new ValueAnimator . AnimatorUpdateListener ( ) { @ Override public void onAnimationUpdate ( ValueAnimator animation ) { invalidate ( ) ; } } ) ; progressAnimator . start ( ) ;
public class DeleteFilesJob { /** * Deletes the files in the provided GoogleCloudStorageFileSet */ @ Override public Value < Void > run ( List < GcsFilename > files ) throws Exception { } }
for ( GcsFilename file : files ) { try { gcs . delete ( file ) ; } catch ( RetriesExhaustedException | IOException e ) { log . log ( Level . WARNING , "Failed to cleanup file: " + file , e ) ; } } return null ;
public class ModelMapper { /** * Transform an organization from database model to client / server model * @ param dbOrganization DbOrganization * @ return Organization */ public Organization getOrganization ( final DbOrganization dbOrganization ) { } }
final Organization organization = DataModelFactory . createOrganization ( dbOrganization . getName ( ) ) ; organization . getCorporateGroupIdPrefixes ( ) . addAll ( dbOrganization . getCorporateGroupIdPrefixes ( ) ) ; return organization ;
public class DaoTemplate { /** * 根据某个字段 ( 最好是唯一字段 ) 查询单个记录 < br > * 当有多条返回时 , 只显示查询到的第一条 * @ param < T > 字段值类型 * @ param field 字段名 * @ param value 字段值 * @ return 记录 * @ throws SQLException SQL执行异常 */ public < T > Entity get ( String field , T value ) throws SQLException { } }
return this . get ( Entity . create ( tableName ) . set ( field , value ) ) ;
public class JspConfigTypeImpl { /** * Returns all < code > jsp - property - group < / code > elements * @ return list of < code > jsp - property - group < / code > */ public List < JspPropertyGroupType < JspConfigType < T > > > getAllJspPropertyGroup ( ) { } }
List < JspPropertyGroupType < JspConfigType < T > > > list = new ArrayList < JspPropertyGroupType < JspConfigType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "jsp-property-group" ) ; for ( Node node : nodeList ) { JspPropertyGroupType < JspConfigType < T > > type = new JspPropertyGroupTypeImpl < JspConfigType < T > > ( this , "jsp-property-group" , childNode , node ) ; list . add ( type ) ; } return list ;
public class EmbedBuilder { /** * Updates all fields of the embed that satisfy the given predicate using the given updater . * @ param predicate The predicate that fields have to satisfy to get updated . * @ param updater The updater for the fields ; the { @ code EditableEmbedField } is only valid during the run of the * updater ; any try to save it in a variable and reuse it later after this method call will fail * with exceptions . * @ return The current instance in order to chain call methods . */ public EmbedBuilder updateFields ( Predicate < EmbedField > predicate , Consumer < EditableEmbedField > updater ) { } }
delegate . updateFields ( predicate , updater ) ; return this ;
public class OjbTagsHandler { /** * Processes the template for all reference definitions of the current class definition . * @ param template The template * @ param attributes The attributes of the tag * @ exception XDocletException if an error occurs * @ doc . tag type = " block " */ public void forAllReferenceDefinitions ( String template , Properties attributes ) throws XDocletException { } }
for ( Iterator it = _curClassDef . getReferences ( ) ; it . hasNext ( ) ; ) { _curReferenceDef = ( ReferenceDescriptorDef ) it . next ( ) ; // first we check whether it is an inherited anonymous reference if ( _curReferenceDef . isAnonymous ( ) && ( _curReferenceDef . getOwner ( ) != _curClassDef ) ) { continue ; } if ( ! isFeatureIgnored ( LEVEL_REFERENCE ) && ! _curReferenceDef . getBooleanProperty ( PropertyHelper . OJB_PROPERTY_IGNORE , false ) ) { generate ( template ) ; } } _curReferenceDef = null ;
public class DistanceFormatter { /** * Method that can be used to check if an instance of { @ link DistanceFormatter } * needs to be updated based on the passed language / unitType . * @ param language to check against the current formatter language * @ param unitType to check against the current formatter unitType * @ return true if new formatter is needed , false otherwise */ public boolean shouldUpdate ( @ NonNull String language , @ NonNull String unitType , int roundingIncrement ) { } }
return ! this . language . equals ( language ) || ! this . unitType . equals ( unitType ) || ! ( this . roundingIncrement == roundingIncrement ) ;
public class CleverTapAPI {

    /**
     * Lifecycle: invoked when an activity comes to the foreground. Handles
     * session timeout, the one-time App Launched event, initial event flushing
     * for a new session, and any pending in-app notifications.
     */
    private void activityResumed(Activity activity) {
        getConfigLogger().verbose(getAccountId(), "App in foreground");
        checkTimeoutSession();
        // push the App Launched event (and refresh the push token) only once
        // per app launch
        if (!isAppLaunchPushed()) {
            pushAppLaunchedEvent();
            onTokenRefresh();
        }
        if (!inCurrentSession()) {
            // onTokenRefresh();
            // starting a new session: flush the initial events asynchronously
            pushInitialEventsAsync();
        }
        checkPendingInAppNotifications(activity);
    }
}
public class NameNameHtmlRenderer { /** * { @ inheritDoc } */ @ Override public final String getNameHtml ( ) { } }
final Name name = nameRenderer . getGedObject ( ) ; final StringBuilder builder = new StringBuilder ( 40 ) ; final String prefix = GedRenderer . escapeString ( name . getPrefix ( ) ) ; final String surname = GedRenderer . escapeString ( name . getSurname ( ) ) ; final String suffix = GedRenderer . escapeString ( name . getSuffix ( ) ) ; builder . append ( prefix ) ; builder . append ( " <span class=\"surname\">" ) ; builder . append ( surname ) ; builder . append ( "</span>" ) ; if ( ! suffix . isEmpty ( ) ) { builder . append ( ' ' ) . append ( suffix ) ; } return builder . toString ( ) ;
public class OSXAdapter { /** * See setFileHandler above for an example */ public boolean callTarget ( Object appleEvent ) throws InvocationTargetException , IllegalAccessException { } }
Object result = targetMethod . invoke ( targetObject , ( Object [ ] ) null ) ; if ( result == null ) { return true ; } return Boolean . valueOf ( result . toString ( ) ) . booleanValue ( ) ;
public class DateUtils { /** * Sets the minute field to a date returning a new object . * The original { @ code Date } is unchanged . * @ param date the date , not null * @ param amount the amount to set * @ return a new { @ code Date } set with the specified value * @ throws IllegalArgumentException if the date is null * @ since 2.4 */ public static Date setMinutes ( final Date date , final int amount ) { } }
return set ( date , Calendar . MINUTE , amount ) ;
public class TypeMappingsFactory { /** * Get the type id ( defined by this class ) for the given class . * @ param classType Class to get type of . * @ return Type id of class . */ public int getTypeId ( Class classType ) { } }
final Class origType = classType ; while ( null != classType ) { Integer typeObj = ( Integer ) _typeMap . get ( classType ) ; if ( null != typeObj ) { return typeObj . intValue ( ) ; } classType = classType . getSuperclass ( ) ; } // special check for blobs / clobs they are interfaces not derived from if ( Blob . class . isAssignableFrom ( origType ) ) { return _typeMap . get ( Blob . class ) . intValue ( ) ; } else if ( Clob . class . isAssignableFrom ( origType ) ) { return _typeMap . get ( Clob . class ) . intValue ( ) ; } return TYPE_UNKNOWN ;
public class GrpcUtil { /** * Closes an InputStream , ignoring IOExceptions . * This method exists because Guava ' s { @ code Closeables . closeQuietly ( ) } is beta . */ public static void closeQuietly ( @ Nullable InputStream message ) { } }
if ( message == null ) { return ; } try { message . close ( ) ; } catch ( IOException ioException ) { // do nothing except log log . log ( Level . WARNING , "exception caught in closeQuietly" , ioException ) ; }
public class SRE { /** * Replies all the installed SRE into the class path . * @ param onlyInstalledInJRE indicates if the services will be considered only into the libraries that are * installed into the JRE . If { @ code true } , only the libraries into the JRE will be considered and * the application libraries will be ignored . If { @ code false } , the application libraries will be * considered as well . * @ return the installed SRE . */ @ Pure public static ServiceLoader < SREBootstrap > getServiceLoader ( boolean onlyInstalledInJRE ) { } }
synchronized ( SRE . class ) { ServiceLoader < SREBootstrap > sl = loader == null ? null : loader . get ( ) ; if ( sl == null ) { if ( onlyInstalledInJRE ) { sl = ServiceLoader . loadInstalled ( SREBootstrap . class ) ; } else { sl = ServiceLoader . load ( SREBootstrap . class ) ; } loader = new SoftReference < > ( sl ) ; } return sl ; }
public class GitHubClientCacheOps { /** * Removes directories with caches * @ param caches paths to directories to be removed */ private static void deleteEveryIn ( DirectoryStream < Path > caches ) { } }
for ( Path notActualCache : caches ) { LOGGER . debug ( "Deleting redundant cache dir {}" , notActualCache ) ; try { FileUtils . deleteDirectory ( notActualCache . toFile ( ) ) ; } catch ( IOException e ) { LOGGER . error ( "Can't delete cache dir <{}>" , notActualCache , e ) ; } }
public class BraveTracerFactory { /** * The Open Tracing { @ link Tracer } bean . * @ param tracing The { @ link Tracing } bean * @ return The Open Tracing { @ link Tracer } bean */ @ Singleton @ Requires ( classes = { } }
BraveTracer . class , Tracer . class } ) @ Primary Tracer braveTracer ( Tracing tracing ) { BraveTracer braveTracer = BraveTracer . create ( tracing ) ; if ( ! GlobalTracer . isRegistered ( ) ) { GlobalTracer . register ( braveTracer ) ; } return braveTracer ;
public class UserSeedChangeListener { /** * Will notify all the registered listeners about the event * @ param user The target user */ public static void fireUserSeedRenewed ( @ Nonnull User user ) { } }
for ( UserSeedChangeListener l : all ( ) ) { try { l . onUserSeedRenewed ( user ) ; } catch ( Exception e ) { LOGGER . log ( Level . WARNING , "Exception caught during onUserSeedRenewed event" , e ) ; } }
public class HttpClient { /** * Performs multipart post * @ param url * @ param map * @ throws Exception */ public void postMultipart ( String url , Map < String , String > map ) throws Exception { } }
String boundaryStr = "-----boundary0" ; StringBuilder sb = new StringBuilder ( ) ; map . forEach ( ( k , v ) -> { sb . append ( boundaryStr + "\r" ) ; sb . append ( "Content-Disposition: form-data; name=\"" + k + "\"\r" ) ; sb . append ( "\r" ) ; sb . append ( v ) ; sb . append ( "\r" ) ; } ) ; String request = "POST " + url + " HTTP/1.0\r" + "Content-Type: multipart/form-data; boundary=" + boundaryStr + "\r" + "Content-Length: " + sb . length ( ) + "\r" + "\r" + sb ; request ( request , null ) ;
public class DebugPseudoAtom { /** * { @ inheritDoc } */ @ Override public void setPoint2d ( Point2d point2d ) { } }
logger . debug ( "Setting point2d: x=" + point2d . x + ", y=" + point2d . y ) ; super . setPoint2d ( point2d ) ;
public class IndyGuardsFiltersAndSignatures { /** * This method is called by he handle to realize the bean constructor * with property map . */ public static Object setBeanProperties ( MetaClass mc , Object bean , Map properties ) { } }
for ( Iterator iter = properties . entrySet ( ) . iterator ( ) ; iter . hasNext ( ) ; ) { Map . Entry entry = ( Map . Entry ) iter . next ( ) ; String key = entry . getKey ( ) . toString ( ) ; Object value = entry . getValue ( ) ; mc . setProperty ( bean , key , value ) ; } return bean ;
public class CpCommand {
    /**
     * Preserves attributes from the source file to the target file.
     *
     * Only permission-related attributes (owner, group, mode, ACL) are copied, and only
     * when the user requested preservation (mPreservePermissions).
     *
     * @param srcPath the source path
     * @param dstPath the destination path in the Alluxio filesystem
     * @throws IOException if a filesystem operation fails
     * @throws AlluxioException if a filesystem operation fails
     */
    private void preserveAttributes(AlluxioURI srcPath, AlluxioURI dstPath) throws IOException, AlluxioException {
        if (mPreservePermissions) {
            URIStatus srcStatus = mFileSystem.getStatus(srcPath);
            // Copy owner, group and the numeric mode in a single setAttribute call.
            mFileSystem.setAttribute(dstPath, SetAttributePOptions.newBuilder()
                .setOwner(srcStatus.getOwner())
                .setGroup(srcStatus.getGroup())
                .setMode(new Mode((short) srcStatus.getMode()).toProto())
                .build());
            // REPLACE discards any ACL entries already on the destination before
            // applying the source's entries.
            mFileSystem.setAcl(dstPath, SetAclAction.REPLACE, srcStatus.getAcl().getEntries());
        }
    }
}
public class ExtractorUtils { /** * Naive implementation of the difference in days between two dates */ private static long daysBetween ( Date date1 , Date date2 ) { } }
long diff ; if ( date2 . after ( date1 ) ) { diff = date2 . getTime ( ) - date1 . getTime ( ) ; } else { diff = date1 . getTime ( ) - date2 . getTime ( ) ; } return diff / ( 24 * 60 * 60 * 1000 ) ;
public class UpdatePreferencesServlet {
    /**
     * Update the user's preferred skin.
     *
     * @param request HTTP Request
     * @param skinName name of the Skin
     * @return an empty JSON view acknowledging the change
     * @throws IOException declared for the handler contract; not thrown directly here
     */
    @RequestMapping(method = RequestMethod.POST, params = "action=chooseSkin")
    public ModelAndView chooseSkin(HttpServletRequest request, @RequestParam String skinName) throws IOException {
        // Persist the skin choice as a theme-scoped stylesheet parameter for this user.
        this.stylesheetUserPreferencesService.setStylesheetParameter(request, PreferencesScope.THEME, "skin", skinName);
        // The JSON view renders the (empty) model map; clients only need the 200 status.
        return new ModelAndView("jsonView", Collections.emptyMap());
    }
}
public class JCRStoreResource { /** * The method is called from the transaction manager if the complete * transaction is completed . < br / > * @ param _ xid global transaction identifier ( not used , because each * file with the file id gets a new VFS store resource * instance ) * @ param _ onePhase < i > true < / i > if it is a one phase commitment transaction * ( not used ) * @ throws XAException if any exception occurs ( catch on * { @ link java . lang . Throwable } ) */ @ Override public void commit ( final Xid _xid , final boolean _onePhase ) throws XAException { } }
try { if ( getSession ( ) . hasPendingChanges ( ) ) { getSession ( ) . save ( ) ; } getSession ( ) . logout ( ) ; } catch ( final AccessDeniedException e ) { throw new XAException ( "AccessDeniedException" ) ; } catch ( final ItemExistsException e ) { throw new XAException ( "ItemExistsException" ) ; } catch ( final ReferentialIntegrityException e ) { throw new XAException ( "ReferentialIntegrityException" ) ; } catch ( final ConstraintViolationException e ) { throw new XAException ( "AccessDeniedException" ) ; } catch ( final InvalidItemStateException e ) { throw new XAException ( "InvalidItemStateException" ) ; } catch ( final VersionException e ) { throw new XAException ( "VersionException" ) ; } catch ( final LockException e ) { throw new XAException ( XAException . XA_RBDEADLOCK ) ; } catch ( final NoSuchNodeTypeException e ) { throw new XAException ( "NoSuchNodeTypeException" ) ; } catch ( final RepositoryException e ) { throw new XAException ( "RepositoryException" ) ; } catch ( final EFapsException e ) { throw new XAException ( "RepositoryException" ) ; }
public class RestTemplate {
    /**
     * Executes an OPTIONS request and returns the HTTP methods the resource allows.
     *
     * @param url the target URL (may contain URI template placeholders)
     * @param urlVariables values to expand into the URL template
     * @return the set of methods from the response's {@code Allow} header
     * @throws RestClientException on client-side HTTP errors
     */
    public Set<HttpMethod> optionsForAllow(String url, Object... urlVariables) throws RestClientException {
        // No request callback is needed (null); only the response headers are of interest.
        HttpHeaders headers = execute(url, HttpMethod.OPTIONS, null, this.headersExtractor, urlVariables);
        return headers.getAllow();
    }
}
public class UserClient { /** * Set user ' s group message blocking * @ param payload GroupShieldPayload * @ param username Necessary * @ return No content * @ throws APIConnectionException connect exception * @ throws APIRequestException request exception */ public ResponseWrapper setGroupShield ( GroupShieldPayload payload , String username ) throws APIConnectionException , APIRequestException { } }
Preconditions . checkArgument ( null != payload , "GroupShieldPayload should not be null" ) ; StringUtils . checkUsername ( username ) ; return _httpClient . sendPost ( _baseUrl + userPath + "/" + username + "/groupsShield" , payload . toString ( ) ) ;
public class GaliosFieldOps { /** * < p > Multiply the two polynomials together . The technique used here isn ' t the fastest but is easy * to understand . < / p > * < p > NOTE : No modulus operation is performed so the result might not be a member of the same field . < / p > * @ param a polynomial * @ param b polynomial * @ return result polynomial */ public static int multiply ( int a , int b ) { } }
int z = 0 ; for ( int i = 0 ; ( b >> i ) > 0 ; i ++ ) { if ( ( b & ( 1 << i ) ) != 0 ) { z ^= a << i ; } } return z ;
public class RestClientUtil {
    /**
     * Updates an index mapping definition: my_index/_mapping.
     * https://www.elastic.co/guide/en/elasticsearch/reference/7.0/indices-put-mapping.html
     *
     * @param action the REST endpoint path (e.g. "my_index/_mapping")
     * @param indexMapping the mapping definition payload
     * @return the raw response body from Elasticsearch
     * @throws ElasticSearchException on request failure other than a handled 404
     */
    public String updateIndiceMapping(String action, String indexMapping) throws ElasticSearchException {
        try {
            return this.client.executeHttp(action, indexMapping, ClientUtil.HTTP_POST);
        } catch (ElasticSearchException e) {
            // 404 responses are translated into a typed result instead of propagating;
            // other failures are rethrown by the helper.
            return ResultUtil.hand404HttpRuntimeException(e, String.class, ResultUtil.OPERTYPE_updateIndiceMapping);
        }
    }
}
public class DateTimeUtils { /** * Converts a { @ code java . sql . Date } to a { @ code LocalDate } . * @ param sqlDate the SQL date , not null * @ return the local date , not null */ @ SuppressWarnings ( "deprecation" ) public static LocalDate toLocalDate ( java . sql . Date sqlDate ) { } }
return LocalDate . of ( sqlDate . getYear ( ) + 1900 , sqlDate . getMonth ( ) + 1 , sqlDate . getDate ( ) ) ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertCDDXocBaseToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class Transformers {
    /**
     * Recursively converts {@code value} into an instance of {@code toClazz}.
     *
     * Dispatch order: already-compatible values are returned as-is; {@code BindObject}
     * property bags are copied onto a fresh bean; {@code Optional} wrappers are mapped or
     * unwrapped; maps and collections are converted element-wise; everything else is
     * delegated to {@code doTransform} via the registry. Any checked failure is rethrown
     * as an unchecked {@code RuntimeException}.
     */
    static <T> T transform(Object value, Class<T> toClazz, Registry registry) {
        try {
            if (toClazz.isInstance(value)) {
                // Already assignable: no conversion needed.
                return (T) value;
            } else if (value instanceof BindObject) {
                // Bean-style target: convert every bound entry to the declared property
                // type and write it onto a fresh instance; nulls are skipped.
                T bean = newInstance(toClazz);
                for (Map.Entry<String, Object> entry : (BindObject) value) {
                    Class<?> requiredType = PropertyUtils.getPropertyType(toClazz, entry.getKey());
                    Object propValue = transform(entry.getValue(), requiredType, registry);
                    if (propValue != null) {
                        PropertyUtils.writeProperty(bean, entry.getKey(), propValue);
                    }
                }
                return bean;
            } else if (value instanceof Optional) {
                Optional<?> optional = (Optional) value;
                if (toClazz == Optional.class) {
                    // NOTE(review): the element type is read from toClazz (Optional.class)
                    // itself — confirm getGenericParamTypes yields the intended type here.
                    Class<?> targetType = PropertyUtils.getGenericParamTypes(toClazz)[0];
                    return (T) optional.map(v -> transform(v, targetType, registry));
                } else {
                    // Unwrap and convert the contained value (or null when empty).
                    return transform(optional.orElse(null), toClazz, registry);
                }
            } else if (value instanceof Map) {
                Map<?, ?> values = (Map) value;
                if (Map.class.isAssignableFrom(toClazz)) {
                    // Convert keys and values element-wise to the target map's generic types.
                    Class<?> keyType = PropertyUtils.getGenericParamTypes(toClazz)[0];
                    Class<?> valueType = PropertyUtils.getGenericParamTypes(toClazz)[1];
                    values = values.entrySet().stream().map(e -> entry(transform(e.getKey(), keyType, registry), transform(e.getValue(), valueType, registry))).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
                    return doTransform(values, toClazz, registry);
                } else
                    throw new IllegalArgumentException("INCOMPATIBLE transform: " + value.getClass().getName() + " -> " + toClazz.getName());
            } else if (value instanceof Collection) {
                Collection<?> values = (Collection) value;
                if (Collection.class.isAssignableFrom(toClazz) || toClazz.isArray()) {
                    // Element type comes from the array component or the collection's generic parameter.
                    Class<?> elemType = toClazz.isArray() ? toClazz.getComponentType() : PropertyUtils.getGenericParamTypes(toClazz)[0];
                    values = values.stream().map(v -> transform(v, elemType, registry)).collect(Collectors.<Object>toList());
                    return doTransform(values, toClazz, registry);
                } else
                    throw new IllegalArgumentException("INCOMPATIBLE transform: " + value.getClass().getName() + " -> " + toClazz.getName());
            } else {
                // Scalar or registry-handled conversion.
                return doTransform(value, toClazz, registry);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
public class StaticCATXATransaction {
    /**
     * Calls setTransactionTimeout() on the SIXAResource.
     *
     * Fields:
     *   BIT16 XAResourceId
     *   BIT32 Timeout
     *
     * Decodes the resource id and timeout from the request buffer, invokes the timeout
     * on the corresponding enlisted XA resource (if any), and replies with a single
     * byte: 1 on success, 0 otherwise. XAExceptions are forwarded to the client.
     *
     * @param request the inbound wire buffer
     * @param conversation the JFap conversation to reply on
     * @param requestNumber correlates the reply with the request
     * @param allocatedFromBufferPool passed to request.release() on exit
     * @param partOfExchange only used for entry tracing
     */
    public static void rcvXA_setTxTimeout(CommsByteBuffer request, Conversation conversation, int requestNumber, boolean allocatedFromBufferPool, boolean partOfExchange) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "rcvXA_setTxTimeout", new Object[] { request, conversation, "" + requestNumber, "" + allocatedFromBufferPool, "" + partOfExchange });
        try {
            // Wire format: resource id, then the requested timeout (both as ints here).
            int clientTransactionId = request.getInt();
            int timeout = request.getInt();
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                SibTr.debug(tc, "XAResource Object ID", clientTransactionId);
                SibTr.debug(tc, "Timeout", timeout);
            }
            // Get the transaction out of the table.
            ServerLinkLevelState linkState = (ServerLinkLevelState) conversation.getLinkLevelAttachment();
            SITransaction tran = linkState.getTransactionTable().get(clientTransactionId, true);
            boolean success = false;
            if ((tran != null) && (tran != IdToTransactionTable.INVALID_TRANSACTION)) {
                // tran may be null if the client is calling this method on
                // an unenlisted XA resource.
                // Get the actual transaction...
                SIXAResource xaResource = (SIXAResource) tran;
                // Now call the method on the XA resource.
                success = xaResource.setTransactionTimeout(timeout);
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "Result: " + success);
            // Reply carries a single boolean byte: 1 = timeout accepted, 0 = rejected/no resource.
            CommsByteBuffer reply = poolManager.allocate();
            if (success) {
                reply.put((byte) 1);
            } else {
                reply.put((byte) 0);
            }
            try {
                conversation.send(reply, JFapChannelConstants.SEG_XA_SETTXTIMEOUT_R, requestNumber, JFapChannelConstants.PRIORITY_MEDIUM, true, ThrottlingPolicy.BLOCK_THREAD, null);
            } catch (SIException e) {
                // Send failure: record FFDC and log; there is no way to reply at this point.
                FFDCFilter.processException(e, CLASS_NAME + ".rcvXA_setTxTimeout", CommsConstants.STATICCATXATRANSACTION_SETTXTIMEOUT_01);
                SibTr.error(tc, "COMMUNICATION_ERROR_SICO2027", e);
            }
        } catch (XAException e) {
            // The resource rejected the call: record FFDC and relay the XAException to the client.
            FFDCFilter.processException(e, CLASS_NAME + ".rcvXA_setTxTimeout", CommsConstants.STATICCATXATRANSACTION_SETTXTIMEOUT_02);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "XAException - RC: " + e.errorCode, e);
            StaticCATHelper.sendExceptionToClient(e, CommsConstants.STATICCATXATRANSACTION_SETTXTIMEOUT_02, conversation, requestNumber);
        }
        // Always return the request buffer to its pool, success or failure.
        request.release(allocatedFromBufferPool);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "rcvXA_setTxTimeout");
    }
}
public class LogQueryTool { /** * Get query , truncated if to big . * @ param buffer current query buffer * @ return possibly truncated query if too big */ private String subQuery ( ByteBuffer buffer ) { } }
String queryString ; if ( options . maxQuerySizeToLog == 0 ) { queryString = new String ( buffer . array ( ) , 5 , buffer . limit ( ) ) ; } else { queryString = new String ( buffer . array ( ) , 5 , Math . min ( buffer . limit ( ) - 5 , ( options . maxQuerySizeToLog * 3 ) ) ) ; if ( queryString . length ( ) > options . maxQuerySizeToLog - 3 ) { queryString = queryString . substring ( 0 , options . maxQuerySizeToLog - 3 ) + "..." ; } } return queryString ;
public class ArbitrateFactory { /** * 销毁和释放对应pipelineId的仲裁资源 * @ param pipelineId */ public static < T extends ArbitrateLifeCycle > void destory ( Long pipelineId , Class < T > instanceClass ) { } }
Map < Class , Object > resources = cache . get ( pipelineId ) ; if ( resources != null ) { Object obj = resources . remove ( instanceClass ) ; if ( obj instanceof ArbitrateLifeCycle ) { ArbitrateLifeCycle lifeCycle = ( ArbitrateLifeCycle ) obj ; lifeCycle . destory ( ) ; // 调用销毁方法 } }
public class ClustersInner {
    /**
     * Executes script actions on the specified HDInsight cluster.
     *
     * @param resourceGroupName The name of the resource group.
     * @param clusterName The name of the cluster.
     * @param parameters The parameters for executing script actions.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> beginExecuteScriptActionsAsync(String resourceGroupName, String clusterName, ExecuteScriptActionParameters parameters) {
        // Unwrap the ServiceResponse envelope; the operation has no body, so this maps to Void.
        return beginExecuteScriptActionsWithServiceResponseAsync(resourceGroupName, clusterName, parameters).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }
}
public class SimpleDocTreeVisitor {
    /**
     * {@inheritDoc} This implementation calls {@code defaultAction}.
     *
     * @param node {@inheritDoc}
     * @param p {@inheritDoc}
     * @return the result of {@code defaultAction}
     */
    @Override
    public R visitValue(ValueTree node, P p) {
        // Simple visitor: no per-node-type behavior, just the shared default.
        return defaultAction(node, p);
    }
}
public class MapEntry { /** * / * ( non - Javadoc ) * @ see com . google . protobuf . MessageOrBuilder # getField ( com . google . protobuf . Descriptors . FieldDescriptor ) */ @ Override public Object getField ( FieldDescriptor field ) { } }
checkFieldDescriptor ( field ) ; Object result = field . getNumber ( ) == 1 ? getKey ( ) : getValue ( ) ; // Convert enums to EnumValueDescriptor . if ( field . getType ( ) == FieldDescriptor . Type . ENUM ) { result = field . getEnumType ( ) . findValueByNumberCreatingIfUnknown ( ( java . lang . Integer ) result ) ; } return result ;
public class PkiOperationResponseHandler { /** * { @ inheritDoc } */ @ Override public CMSSignedData getResponse ( final byte [ ] content , final String mimeType ) throws ContentException { } }
if ( mimeType . startsWith ( PKI_MESSAGE ) ) { try { return new CMSSignedData ( content ) ; } catch ( CMSException e ) { throw new InvalidContentException ( e ) ; } } else { throw new InvalidContentTypeException ( mimeType , PKI_MESSAGE ) ; }
public class AWSIoTAnalyticsClient {
    /**
     * Creates the content of a data set by applying a "queryAction" (a SQL query) or a "containerAction" (executing a
     * containerized application).
     *
     * @param request the CreateDatasetContent request
     * @return Result of the CreateDatasetContent operation returned by the service.
     * @throws InvalidRequestException The request was not valid.
     * @throws ResourceNotFoundException A resource with the specified name could not be found.
     * @throws InternalFailureException There was an internal failure.
     * @throws ServiceUnavailableException The service is temporarily unavailable.
     * @throws ThrottlingException The request was denied due to request throttling.
     * @sample AWSIoTAnalytics.CreateDatasetContent
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotanalytics-2017-11-27/CreateDatasetContent"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public CreateDatasetContentResult createDatasetContent(CreateDatasetContentRequest request) {
        // Run registered request handlers/mutators before dispatching the call.
        request = beforeClientExecution(request);
        return executeCreateDatasetContent(request);
    }
}
public class CmsUploadPropertyPanel {
    /**
     * Sets up the property editor.<p>
     *
     * Builds the editor from the property definitions carried by m_values, wires its
     * form handler back to this panel, and adds the resulting widget to this layout.
     */
    protected void initializePropertyEditor() {
        Map<String, CmsXmlContentProperty> propertyConfig = m_values.getPropertyDefinitions();
        m_propertyEditorHandler = new CmsUploadPropertyEditorHandler(m_dialog, m_values);
        CmsSimplePropertyEditor propertyEditor = new CmsUploadPropertyEditor(propertyConfig, m_propertyEditorHandler);
        propertyEditor.getForm().setFormHandler(this);
        m_propertyEditor = propertyEditor;
        // NOTE(review): null argument to initializeWidgets — presumably "no owning dialog";
        // confirm against the editor's API.
        m_propertyEditor.initializeWidgets(null);
        A_CmsFormFieldPanel propertiesPanel = m_propertyEditor.getForm().getWidget();
        add(propertiesPanel);
    }
}
public class CmsDynamicFunctionParser { /** * Parses a dynamic function bean from an in - memory XML content object . < p > * @ param cms the current CMS context * @ param content the XML content from which to read the dynamic function bean * @ return the dynamic function bean read from the XML content * @ throws CmsException if something goes wrong */ public CmsDynamicFunctionBean parseFunctionBean ( CmsObject cms , CmsXmlContent content ) throws CmsException { } }
Locale locale = getLocaleToUse ( cms , content ) ; String oldSiteRoot = cms . getRequestContext ( ) . getSiteRoot ( ) ; try { cms . getRequestContext ( ) . setSiteRoot ( "" ) ; CmsResource functionFormatter = getFunctionFormatter ( cms ) ; CmsXmlContentRootLocation root = new CmsXmlContentRootLocation ( content , locale ) ; CmsDynamicFunctionBean functionBean = parseFunctionBean ( cms , root , content . getFile ( ) , functionFormatter ) ; return functionBean ; } finally { cms . getRequestContext ( ) . setSiteRoot ( oldSiteRoot ) ; }
public class SipStandardManager { /** * Set the maximum number of actives Sip Sessions allowed , or - 1 for no * limit . * @ param max * The new maximum number of sip sessions */ public void setMaxActiveSipSessions ( int max ) { } }
int oldMaxActiveSipSessions = this . sipManagerDelegate . getMaxActiveSipSessions ( ) ; this . sipManagerDelegate . setMaxActiveSipSessions ( max ) ; support . firePropertyChange ( "maxActiveSipSessions" , Integer . valueOf ( oldMaxActiveSipSessions ) , Integer . valueOf ( this . sipManagerDelegate . getMaxActiveSipSessions ( ) ) ) ;
public class MatchTarget { /** * Creates a clone of this MatchTarget . Override only if the system clone support does * not produce a correct result . */ public MatchTarget duplicate ( ) { } }
try { return ( MatchTarget ) clone ( ) ; } catch ( CloneNotSupportedException e ) { // No FFDC Code Needed . // FFDC driven by wrapper class . FFDC . processException ( cclass , "com.ibm.ws.sib.matchspace.MatchTarget.duplicate" , e , "1:112:1.15" ) ; // should not happen throw new IllegalStateException ( ) ; }
public class QueryUtils { /** * Returns type of query from given query string . * @ param query a query string * @ return type of query * @ since 1.4 */ public static QueryType getQueryType ( String query ) { } }
final String trimmedQuery = removeCommentAndWhiteSpace ( query ) ; if ( trimmedQuery == null || trimmedQuery . length ( ) < 1 ) { return QueryType . OTHER ; } final char firstChar = trimmedQuery . charAt ( 0 ) ; final QueryType type ; switch ( firstChar ) { case 'S' : case 's' : type = QueryType . SELECT ; break ; case 'I' : case 'i' : type = QueryType . INSERT ; break ; case 'U' : case 'u' : type = QueryType . UPDATE ; break ; case 'D' : case 'd' : type = QueryType . DELETE ; break ; default : type = QueryType . OTHER ; } return type ;
public class Configuration {
    /**
     * Return value matching this enumerated type.
     *
     * @param name Property name
     * @param declaringClass the enum type to map the raw string value onto
     * @return the enum constant whose name equals the configured value
     * @throws NullPointerException if the configuration property does not exist
     * @throws IllegalArgumentException If mapping is illegal for the type provided
     */
    public <T extends Enum<T>> T getEnum(String name, Class<T> declaringClass) {
        final String val = get(name);
        // Fail fast with NPE when the property is missing, per the documented contract.
        Preconditions.checkNotNull(val);
        // Enum.valueOf is case-sensitive and throws IllegalArgumentException on no match.
        return Enum.valueOf(declaringClass, val);
    }
}
public class AsyncMutateInBuilder {
    /**
     * Perform several {@link Mutation mutation} operations inside a single existing JSON
     * document and watch for durability requirements, with a custom timeout.
     *
     * Multi-mutations are applied atomically at the document level: if one mutation fails,
     * none are applied; otherwise all succeed and a {@link DocumentFragment} with the updated
     * cas (and optionally {@link MutationToken}) is emitted. Note that the mutation itself has
     * already executed by the time durability is watched — they are two separate tasks.
     *
     * Notable error conditions include {@link DocumentDoesNotExistException},
     * {@link DocumentNotJsonException}, {@link IllegalArgumentException} (no mutation defined),
     * {@link TranscodingException}, {@link MultiMutationException},
     * {@link DurabilityException} and {@link CASMismatchException}.
     *
     * @param persistTo the persistence requirement to watch.
     * @param timeout the specific timeout to apply for the operation.
     * @param timeUnit the time unit for the timeout.
     * @return an {@link Observable} of a single {@link DocumentFragment} (if successful)
     *         containing updated cas metadata.
     */
    public Observable<DocumentFragment<Mutation>> execute(PersistTo persistTo, long timeout, TimeUnit timeUnit) {
        // Delegate to the full overload with no replication requirement.
        return execute(persistTo, ReplicateTo.NONE, timeout, timeUnit);
    }
}
public class Interceptors { /** * Creates a ternary interceptor chain . * @ param < R > the function result type * @ param < T1 > the function first parameter type * @ param < T2 > the function second parameter type * @ param < T3 > the function third parameter type * @ param < I > the ternary interceptor type * @ param innermost the function to be intercepted * @ param interceptors an iterator of interceptors * @ return the resulting function */ public static < R , T1 , T2 , T3 , I extends TernaryInterceptor < T1 , T2 , T3 > > TriFunction < T1 , T2 , T3 , R > intercept ( TriFunction < T1 , T2 , T3 , R > innermost , Iterator < I > interceptors ) { } }
return new TernaryInterceptorChain < T1 , T2 , T3 , R > ( innermost , interceptors ) ;
public class XmlInOut { /** * Export this table . * @ record The record to export . * @ strFileName The destination filename ( deleted the old copy if this file exists ) . */ public boolean exportXML ( BaseTable table , String strFileName ) { } }
Record record = table . getRecord ( ) ; boolean bSuccess = true ; File file = new File ( strFileName ) ; if ( file . exists ( ) ) file . delete ( ) ; // Delete if it exists else { String strPath = file . getParent ( ) ; File fileDir = new File ( strPath ) ; fileDir . mkdirs ( ) ; } XmlInOut . enableAllBehaviors ( record , false , true ) ; // Disable file behaviors Document doc = XmlUtilities . exportFileToDoc ( table ) ; try { OutputStream fileout = new FileOutputStream ( strFileName ) ; Writer out = new OutputStreamWriter ( fileout , XmlUtilities . XML_ENCODING ) ; // , MIME2Java . convert ( " UTF - 8 " ) ) ; Utility . convertDOMToXML ( doc , out ) ; out . close ( ) ; fileout . close ( ) ; } catch ( Exception ex ) { ex . printStackTrace ( ) ; bSuccess = false ; } XmlInOut . enableAllBehaviors ( record , true , true ) ; return bSuccess ;
public class AbstractTypeConvertingMap { /** * Helper method for obtaining long value from parameter * @ param name The name of the parameter * @ return The long value or null if there isn ' t one */ public Long getLong ( String name ) { } }
Object o = get ( name ) ; if ( o instanceof Number ) { return ( ( Number ) o ) . longValue ( ) ; } if ( o != null ) { try { return Long . parseLong ( o . toString ( ) ) ; } catch ( NumberFormatException e ) { } } return null ;
public class JsonXOutput {
    /**
     * Resets this output for re-use.
     *
     * @param clearBuffer when true, also resets the buffer chain — tail is rewound
     *        to the cleared head
     * @return this instance, for call chaining
     */
    public JsonXOutput clear(boolean clearBuffer) {
        if (clearBuffer)
            tail = head.clear();
        // Reset per-write bookkeeping regardless of buffer handling.
        lastRepeated = false;
        lastNumber = 0;
        return this;
    }
}
public class DeviceAttribute_3DAODefaultImpl {
    /**
     * Stores an unsigned-long array as this attribute's value.
     *
     * @param argin the values to insert; treated as a one-dimensional array
     *        (dim_x = length, dim_y = 0)
     */
    public void insert_ul(final int[] argin) {
        attrval.r_dim.dim_x = argin.length;
        attrval.r_dim.dim_y = 0;
        // CORBA helper marshals the int[] into the Any held by attrval.value.
        DevVarULongArrayHelper.insert(attrval.value, argin);
    }
}
public class Studio { /** * Listens to key to close a | window | . */ private static final KeyAdapter closeKeyStrokes ( final java . awt . Window window ) { } }
return new KeyAdapter ( ) { public void keyReleased ( KeyEvent e ) { final int kc = e . getKeyCode ( ) ; if ( kc == KeyEvent . VK_ESCAPE || kc == KeyEvent . VK_ENTER ) { window . dispose ( ) ; } } } ;
public class TasksImpl {
    /**
     * Updates the properties of the specified task.
     *
     * @param jobId The ID of the job containing the task.
     * @param taskId The ID of the task to update.
     * @param constraints Constraints that apply to this task. If omitted, the task is given the
     *        default constraints. For multi-instance tasks, updating the retention time applies
     *        only to the primary task and not subtasks.
     * @param taskUpdateOptions Additional parameters for the operation
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> updateAsync(String jobId, String taskId, TaskConstraints constraints, TaskUpdateOptions taskUpdateOptions, final ServiceCallback<Void> serviceCallback) {
        // The service returns only headers for this operation, hence fromHeaderResponse.
        return ServiceFuture.fromHeaderResponse(updateWithServiceResponseAsync(jobId, taskId, constraints, taskUpdateOptions), serviceCallback);
    }
}
public class HttpRequestSubscriber {
    /**
     * Invoked on each write of an {@link HttpObject}.
     *
     * On write success: records the first-bytes-transferred event once, schedules the
     * response timeout when the request is fully sent, and requests the next message
     * from the upstream publisher. On write failure: fails the request and, unless the
     * publisher was deliberately closed, logs the cause and closes the channel.
     */
    @Override
    public void operationComplete(ChannelFuture future) throws Exception {
        // If a message has been sent out, cancel the timeout for starting a request.
        cancelTimeout();
        if (future.isSuccess()) {
            // The first write is always the first headers, so log that we finished our first transfer over the
            // wire.
            if (!loggedRequestFirstBytesTransferred) {
                logBuilder.requestFirstBytesTransferred();
                loggedRequestFirstBytesTransferred = true;
            }
            if (state == State.DONE) {
                // Successfully sent the request; schedule the response timeout.
                response.scheduleTimeout(ch.eventLoop());
            }
            // Request more messages regardless whether the state is DONE. It makes the producer have
            // a chance to produce the last call such as 'onComplete' and 'onError' when there are
            // no more messages it can produce.
            if (!isSubscriptionCompleted) {
                assert subscription != null;
                subscription.request(1);
            }
            return;
        }
        // Write failed: mark the request as failed first, then decide whether the
        // failure warrants tearing down the channel.
        fail(future.cause());
        final Throwable cause = future.cause();
        if (!(cause instanceof ClosedPublisherException)) {
            // NOTE(review): this local 'ch' shadows the field 'ch' used above —
            // presumably the same channel; verify.
            final Channel ch = future.channel();
            Exceptions.logIfUnexpected(logger, ch, HttpSession.get(ch).protocol(), cause);
            ch.close();
        }
    }
}
public class QuestionnaireBuilder { /** * Special Types - - - - - */ private void addReferenceQuestions ( QuestionnaireItemComponent group , ElementDefinition element , String path , List < CanonicalType > profileURL , List < QuestionnaireResponse . QuestionnaireResponseItemComponent > answerGroups ) throws FHIRException { } }
// Ported from Pascal; original locals were:
//   rn : String ; i : integer ; q : TFhirQuestionnaireGroupQuestion ;
ToolingExtensions . addFhirType ( group , "Reference" ) ;
// Create the single Reference-typed question that will hold the answer.
QuestionnaireItemComponent q = addQuestion ( group , QuestionnaireItemType . REFERENCE , path , "value" , group . getText ( ) , answerGroups ) ;
// The question now carries the text; clear it from the group to avoid duplication.
group . setText ( null ) ;
// Collect the resource names from any core-profile targets so they can be offered
// as the set of allowed resources.
CommaSeparatedStringBuilder rn = new CommaSeparatedStringBuilder ( ) ;
// NOTE(review): iterates a List<CanonicalType> as UriType — relies on CanonicalType
// extending UriType in this FHIR model; confirm.
for ( UriType u : profileURL )
    if ( u . getValue ( ) . startsWith ( "http://hl7.org/fhir/StructureDefinition/" ) )
        // 40 == length of the core StructureDefinition prefix tested above.
        rn . append ( u . getValue ( ) . substring ( 40 ) ) ;
if ( rn . length ( ) == 0 )
    ToolingExtensions . addReferenceFilter ( q , "subject=$subj&patient=$subj&encounter=$encounter" ) ;
else {
    ToolingExtensions . addAllowedResource ( q , rn . toString ( ) ) ;
    ToolingExtensions . addReferenceFilter ( q , "subject=$subj&patient=$subj&encounter=$encounter" ) ;
}
// Answer groups should not repeat the question text either.
for ( QuestionnaireResponse . QuestionnaireResponseItemComponent ag : answerGroups )
    ag . setText ( null ) ;
public class CurvedArrow { /** * Draws a highlight of the curve . * @ param g * the graphics to draw the highlight of the curve upon */ public void drawHighlight ( Graphics2D g ) { } }
if ( needsRefresh ) refreshCurve ( ) ; Graphics2D g2 = ( Graphics2D ) g . create ( ) ; g2 . setStroke ( new java . awt . BasicStroke ( 6.0f ) ) ; g2 . setColor ( HIGHLIGHT_COLOR ) ; g2 . draw ( curve ) ; g2 . transform ( affineToText ) ; g2 . fill ( bounds ) ; g2 . dispose ( ) ;
public class Selector { /** * Find the first element that matches the query . * @ param cssQuery CSS selector * @ param root root element to descend into * @ return the matching element , or < b > null < / b > if none . */ public static Element selectFirst ( String cssQuery , Element root ) { } }
Validate . notEmpty ( cssQuery ) ;
// Compile the CSS query, then walk the tree under root and return the first
// element the evaluator accepts (or null when nothing matches).
return Collector . findFirst ( QueryParser . parse ( cssQuery ) , root ) ;
public class KafkaUtils { /** * SUPPRESS CHECKSTYLE : OFF JavadocMethodRegex */ public static byte [ ] serializeLong ( Long data ) { } }
if ( data == null ) { return null ; } return new byte [ ] { ( byte ) ( data >>> 56 ) , ( byte ) ( data >>> 48 ) , ( byte ) ( data >>> 40 ) , ( byte ) ( data >>> 32 ) , ( byte ) ( data >>> 24 ) , ( byte ) ( data >>> 16 ) , ( byte ) ( data >>> 8 ) , data . byteValue ( ) } ;
public class AlbumUtils { /** * Generate a random jpg file path . * @ param context context . * @ return file path . */ @ NonNull public static String randomJPGPath ( Context context ) { } }
if ( ! Environment . MEDIA_MOUNTED . equals ( Environment . getExternalStorageState ( ) ) ) { return randomJPGPath ( context . getCacheDir ( ) ) ; } return randomJPGPath ( ) ;
public class UUCoder { /** * encodes a byte array to a String * @ param barr * @ return encoded String */ public static String encode ( byte barr [ ] ) { } }
StringBuilder rtn = new StringBuilder ( ) ;
int len = barr . length ;
int read = 0 ; // number of input bytes accounted for so far
boolean stop = false ;
byte b = 0 ; // payload byte count of the current output line (max 45)
int offset = 0 ; // read position within barr
do {
    int left = len - read ;
    if ( left == 0 ) stop = true ;
    // Each uuencoded line carries at most 45 payload bytes; the line starts
    // with the encoded length character.
    if ( left <= 45 ) b = ( byte ) left ; else b = 45 ;
    rtn . append ( _enc ( b ) ) ;
    // Encode the payload in 3-byte groups (each group yields 4 output chars).
    for ( int i = 0 ; i < b ; i += 3 ) {
        if ( len - offset < 3 ) {
            // Final partial group: zero-pad to a full 3-byte group before encoding.
            byte padding [ ] = new byte [ 3 ] ;
            for ( int z = 0 ; offset + z < len ; z ++ ) padding [ z ] = barr [ offset + z ] ;
            encodeBytes ( padding , 0 , rtn ) ;
        } else {
            encodeBytes ( barr , offset , rtn ) ;
        }
        offset += 3 ;
    }
    rtn . append ( '\n' ) ;
    read += b ;
    // A short line is by definition the last line.
    if ( b < 45 ) stop = true ;
} while ( ! stop ) ;
return rtn . toString ( ) ;
public class StepExecution { /** * Fully - resolved values passed into the step before execution . * @ param inputs * Fully - resolved values passed into the step before execution . * @ return Returns a reference to this object so that method calls can be chained together . */ public StepExecution withInputs ( java . util . Map < String , String > inputs ) { } }
// Fluent variant of setInputs(): store the map and return this for chaining.
setInputs ( inputs ) ;
return this ;
public class Computer { /** * Returns the { @ link Node } description for this computer */ @ Restricted ( DoNotUse . class ) @ Exported public @ Nonnull String getDescription ( ) { } }
Node node = getNode ( ) ; return ( node != null ) ? node . getNodeDescription ( ) : null ;
public class VTimeZone { /** * Parse VTIMEZONE data and create a RuleBasedTimeZone */ private boolean parse ( ) { } }
// /CLOVER:OFF
if ( vtzlines == null || vtzlines . size ( ) == 0 ) {
    return false ;
}
// /CLOVER:ON
// timezone ID
String tzid = null ;
int state = INI ;
boolean dst = false ; // current zone type
String from = null ; // current zone from offset
String to = null ; // current zone offset
String tzname = null ; // current zone name
String dtstart = null ; // current zone starts
boolean isRRULE = false ; // true if the rule is described by RRULE
List < String > dates = null ; // list of RDATE or RRULE strings
List < TimeZoneRule > rules = new ArrayList < TimeZoneRule > ( ) ; // rule list
int initialRawOffset = 0 ; // initial offset
int initialDSTSavings = 0 ; // initial offset
long firstStart = MAX_TIME ; // the earliest rule start time
// Line-oriented state machine over the iCalendar lines:
// INI -> VTZ (inside VTIMEZONE) -> TZI (inside STANDARD/DAYLIGHT) -> VTZ ... ; ERR aborts.
for ( String line : vtzlines ) {
    int valueSep = line . indexOf ( COLON ) ;
    if ( valueSep < 0 ) {
        continue ;
    }
    String name = line . substring ( 0 , valueSep ) ;
    String value = line . substring ( valueSep + 1 ) ;
    switch ( state ) {
    case INI :
        if ( name . equals ( ICAL_BEGIN ) && value . equals ( ICAL_VTIMEZONE ) ) {
            state = VTZ ;
        }
        break ;
    case VTZ :
        if ( name . equals ( ICAL_TZID ) ) {
            tzid = value ;
        } else if ( name . equals ( ICAL_TZURL ) ) {
            tzurl = value ;
        } else if ( name . equals ( ICAL_LASTMOD ) ) {
            // Always in 'Z' format , so the offset argument for the parse method
            // can be any value .
            lastmod = new Date ( parseDateTimeString ( value , 0 ) ) ;
        } else if ( name . equals ( ICAL_BEGIN ) ) {
            boolean isDST = value . equals ( ICAL_DAYLIGHT ) ;
            if ( value . equals ( ICAL_STANDARD ) || isDST ) {
                // tzid must be ready at this point
                if ( tzid == null ) {
                    state = ERR ;
                    break ;
                }
                // initialize current zone properties
                dates = null ;
                isRRULE = false ;
                from = null ;
                to = null ;
                tzname = null ;
                dst = isDST ;
                state = TZI ;
            } else {
                // BEGIN property other than STANDARD / DAYLIGHT
                // must not be there .
                state = ERR ;
                break ;
            }
        } else if ( name . equals ( ICAL_END ) /* && value . equals ( ICAL_VTIMEZONE ) */ ) {
            break ;
        }
        break ;
    case TZI :
        if ( name . equals ( ICAL_DTSTART ) ) {
            dtstart = value ;
        } else if ( name . equals ( ICAL_TZNAME ) ) {
            tzname = value ;
        } else if ( name . equals ( ICAL_TZOFFSETFROM ) ) {
            from = value ;
        } else if ( name . equals ( ICAL_TZOFFSETTO ) ) {
            to = value ;
        } else if ( name . equals ( ICAL_RDATE ) ) {
            // RDATE mixed with RRULE is not supported
            if ( isRRULE ) {
                state = ERR ;
                break ;
            }
            if ( dates == null ) {
                dates = new LinkedList < String > ( ) ;
            }
            // RDATE value may contain multiple date delimited by comma
            StringTokenizer st = new StringTokenizer ( value , COMMA ) ;
            while ( st . hasMoreTokens ( ) ) {
                String date = st . nextToken ( ) ;
                dates . add ( date ) ;
            }
        } else if ( name . equals ( ICAL_RRULE ) ) {
            // RRULE mixed with RDATE is not supported
            if ( ! isRRULE && dates != null ) {
                state = ERR ;
                break ;
            } else if ( dates == null ) {
                dates = new LinkedList < String > ( ) ;
            }
            isRRULE = true ;
            dates . add ( value ) ;
        } else if ( name . equals ( ICAL_END ) ) {
            // Mandatory properties
            if ( dtstart == null || from == null || to == null ) {
                state = ERR ;
                break ;
            }
            // if tzname is not available , create one from tzid
            if ( tzname == null ) {
                tzname = getDefaultTZName ( tzid , dst ) ;
            }
            // create a time zone rule
            TimeZoneRule rule = null ;
            int fromOffset = 0 ;
            int toOffset = 0 ;
            int rawOffset = 0 ;
            int dstSavings = 0 ;
            long start = 0 ;
            try {
                // Parse TZOFFSETFROM / TZOFFSETTO
                fromOffset = offsetStrToMillis ( from ) ;
                toOffset = offsetStrToMillis ( to ) ;
                if ( dst ) {
                    // If daylight , use the previous offset as rawoffset if positive
                    if ( toOffset - fromOffset > 0 ) {
                        rawOffset = fromOffset ;
                        dstSavings = toOffset - fromOffset ;
                    } else {
                        // This is rare case .. just use 1 hour DST savings
                        rawOffset = toOffset - DEF_DSTSAVINGS ;
                        dstSavings = DEF_DSTSAVINGS ;
                    }
                } else {
                    rawOffset = toOffset ;
                    dstSavings = 0 ;
                }
                // start time
                start = parseDateTimeString ( dtstart , fromOffset ) ;
                // Create the rule
                Date actualStart = null ;
                if ( isRRULE ) {
                    rule = createRuleByRRULE ( tzname , rawOffset , dstSavings , start , dates , fromOffset ) ;
                } else {
                    rule = createRuleByRDATE ( tzname , rawOffset , dstSavings , start , dates , fromOffset ) ;
                }
                if ( rule != null ) {
                    actualStart = rule . getFirstStart ( fromOffset , 0 ) ;
                    if ( actualStart . getTime ( ) < firstStart ) {
                        // save from offset information for the earliest rule
                        firstStart = actualStart . getTime ( ) ;
                        // If this is STD , assume the time before this transtion
                        // is DST when the difference is 1 hour . This might not be
                        // accurate , but VTIMEZONE data does not have such info .
                        if ( dstSavings > 0 ) {
                            initialRawOffset = fromOffset ;
                            initialDSTSavings = 0 ;
                        } else {
                            if ( fromOffset - toOffset == DEF_DSTSAVINGS ) {
                                initialRawOffset = fromOffset - DEF_DSTSAVINGS ;
                                initialDSTSavings = DEF_DSTSAVINGS ;
                            } else {
                                initialRawOffset = fromOffset ;
                                initialDSTSavings = 0 ;
                            }
                        }
                    }
                }
            } catch ( IllegalArgumentException iae ) {
                // bad format - rule == null ..
            }
            if ( rule == null ) {
                state = ERR ;
                break ;
            }
            rules . add ( rule ) ;
            state = VTZ ;
        }
        break ;
    }
    if ( state == ERR ) {
        vtzlines = null ;
        return false ;
    }
}
// Must have at least one rule
if ( rules . size ( ) == 0 ) {
    return false ;
}
// Create a initial rule
InitialTimeZoneRule initialRule = new InitialTimeZoneRule ( getDefaultTZName ( tzid , false ) , initialRawOffset , initialDSTSavings ) ;
// Finally , create the RuleBasedTimeZone
RuleBasedTimeZone rbtz = new RuleBasedTimeZone ( tzid , initialRule ) ;
int finalRuleIdx = - 1 ;
int finalRuleCount = 0 ;
// Count "final" rules, i.e. annual rules that extend to MAX_YEAR.
for ( int i = 0 ; i < rules . size ( ) ; i ++ ) {
    TimeZoneRule r = rules . get ( i ) ;
    if ( r instanceof AnnualTimeZoneRule ) {
        if ( ( ( AnnualTimeZoneRule ) r ) . getEndYear ( ) == AnnualTimeZoneRule . MAX_YEAR ) {
            finalRuleCount ++ ;
            finalRuleIdx = i ;
        }
    }
}
if ( finalRuleCount > 2 ) {
    // Too many final rules
    return false ;
}
if ( finalRuleCount == 1 ) {
    if ( rules . size ( ) == 1 ) {
        // Only one final rule , only governs the initial rule ,
        // which is already initialized , thus , we do not need to
        // add this transition rule
        rules . clear ( ) ;
    } else {
        // Normalize the final rule
        AnnualTimeZoneRule finalRule = ( AnnualTimeZoneRule ) rules . get ( finalRuleIdx ) ;
        int tmpRaw = finalRule . getRawOffset ( ) ;
        int tmpDST = finalRule . getDSTSavings ( ) ;
        // Find the last non - final rule
        Date finalStart = finalRule . getFirstStart ( initialRawOffset , initialDSTSavings ) ;
        Date start = finalStart ;
        for ( int i = 0 ; i < rules . size ( ) ; i ++ ) {
            if ( finalRuleIdx == i ) {
                continue ;
            }
            TimeZoneRule r = rules . get ( i ) ;
            Date lastStart = r . getFinalStart ( tmpRaw , tmpDST ) ;
            if ( lastStart . after ( start ) ) {
                start = finalRule . getNextStart ( lastStart . getTime ( ) , r . getRawOffset ( ) , r . getDSTSavings ( ) , false ) ;
            }
        }
        TimeZoneRule newRule ;
        if ( start == finalStart ) {
            // Transform this into a single transition
            newRule = new TimeArrayTimeZoneRule ( finalRule . getName ( ) , finalRule . getRawOffset ( ) , finalRule . getDSTSavings ( ) , new long [ ] { finalStart . getTime ( ) } , DateTimeRule . UTC_TIME ) ;
        } else {
            // Update the end year
            int fields [ ] = Grego . timeToFields ( start . getTime ( ) , null ) ;
            newRule = new AnnualTimeZoneRule ( finalRule . getName ( ) , finalRule . getRawOffset ( ) , finalRule . getDSTSavings ( ) , finalRule . getRule ( ) , finalRule . getStartYear ( ) , fields [ 0 ] ) ;
        }
        rules . set ( finalRuleIdx , newRule ) ;
    }
}
for ( TimeZoneRule r : rules ) {
    rbtz . addTransitionRule ( r ) ;
}
tz = rbtz ;
setID ( tzid ) ;
return true ;
public class Sendinblue { /** * Get all campaigns detail . * @ param { Object } data contains json objects as a key value pair from HashMap . * @ options data { String } type : Type of campaign . Possible values – classic , trigger , sms , template ( case sensitive ) [ Optional ] * @ options data { String } status : Status of campaign . Possible values – draft , sent , archive , queued , suspended , in _ process , temp _ active , temp _ inactive ( case sensitive ) [ Optional ] * @ options data { Integer } page : Maximum number of records per request is 500 , if there are more than 500 campaigns then you can use this parameter to get next 500 results [ Optional ] * @ options data { Integer } page _ limit : This should be a valid number between 1-1000 . If page limit is kept empty or > 1000 , default is 500 [ Optional ] */ public String get_campaigns_v2 ( Map < String , Object > data ) { } }
String type = data . get ( "type" ) . toString ( ) ; String status = data . get ( "status" ) . toString ( ) ; String page = data . get ( "page" ) . toString ( ) ; String page_limit = data . get ( "page_limit" ) . toString ( ) ; String url ; if ( EMPTY_STRING . equals ( type ) && EMPTY_STRING . equals ( status ) && EMPTY_STRING . equals ( page ) && EMPTY_STRING . equals ( page_limit ) ) { url = "campaign/detailsv2/" ; } else { url = "campaign/detailsv2/type/" + type + "/status/" + status + "/page/" + page + "/page_limit/" + page_limit + "/" ; } return get ( url , EMPTY_STRING ) ;
public class TempByteHolder { /** * Reads all available data from input stream . * @ param is * @ throws IOException */ public void readFrom ( java . io . InputStream is ) throws IOException { } }
// Reads the stream to exhaustion in _memory_buffer-sized chunks.
int howmuch = 0 ;
do {
    // Account for the bytes consumed by the previous read before sliding the window.
    _write_pos += howmuch ;
    moveWindow ( _write_pos ) ;
    // NOTE(review): every read fills _memory_buffer from offset 0 — presumably
    // moveWindow() flushes / repositions the buffer so this does not clobber
    // unconsumed data; confirm against moveWindow's contract.
    howmuch = is . read ( _memory_buffer ) ;
} while ( howmuch != - 1 ) ;
public class CDIServiceUtils {
    /**
     * Returns a unique identifying string for an annotated type. This method should be
     * used when calling BeforeBeanDiscovery.addAnnotatedType().
     *
     * @param annotatedType the new annotated type you are creating; null is an accepted
     *            value but should only be used if you know exactly what you are doing
     * @param extensionClass the CDI extension which is to call BeforeBeanDiscovery.addAnnotatedType()
     * @return a String that uniquely identifies this annotated type
     */
    public static String getAnnotatedTypeIdentifier (AnnotatedType annotatedType, Class<?> extensionClass) {
        // The symbolic name is a compromise: it keeps the id unique enough to allow
        // multiple annotated types based on the same underlying type ("This method
        // allows multiple annotated types, based on the same underlying type, to be
        // defined"), while still letting failover work. Failover fails if the
        // BeanIdentifierIndex is not identical across all servers, and some of these
        // identifiers end up in that index — problems have been reported with
        // ViewScopeBeanHolder in particular.
        Bundle bundle = FrameworkUtil.getBundle (extensionClass);
        String symbolicName = getSymbolicNameWithoutMinorOrMicroVersionPart (bundle.getSymbolicName ());
        String typeName = (annotatedType == null)
                ? "NULL"
                : annotatedType.getJavaClass ().getCanonicalName ();
        return typeName + "#" + extensionClass.getCanonicalName () + "#" + symbolicName;
    }
}
public class JsxResourceConverter { /** * / * ( non - Javadoc ) * @ see com . ibm . jaggr . core . cache . ICacheManager . ICacheManagerListener # initialized ( com . ibm . jaggr . core . IAggregator ) */ @ Override public void initialized ( ICacheManager cacheManager ) { } }
final String sourceMethod = "initialized" ; //$NON-NLS-1$
final boolean isTraceLogging = log . isLoggable ( Level . FINER ) ;
if ( isTraceLogging ) {
    log . entering ( sourceClass , sourceMethod ) ;
}
// Cache manager is initialized . De - register the listener and add our named cache
cacheMgrListenerReg . unregister ( ) ;
JsxConverter converter = newConverter ( ) ;
IGenericCache cache = newCache ( converter , "jsx." , "" ) ; //$NON-NLS-1$ //$NON-NLS-2$
cache . setAggregator ( aggregator ) ;
// Another party may have registered the cache first; if so, reuse its converter
// so that cached entries and converter state stay consistent.
ResourceConverterCacheImpl oldCache = ( ResourceConverterCacheImpl ) cacheManager . getCache ( ) . putIfAbsent ( JSX_CACHE_NAME , cache ) ;
if ( oldCache != null ) {
    converter = ( JsxConverter ) oldCache . getConverter ( ) ;
}
if ( isTraceLogging ) {
    log . logp ( Level . FINER , sourceClass , sourceMethod , "Initializing resource converter" + ( oldCache != null ? " from cache." : "." ) ) ; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
converter . initialize ( es , xformerRes . getURI ( ) ) ;
if ( isTraceLogging ) {
    log . exiting ( sourceClass , sourceMethod ) ;
}
public class HistogramStatistics { /** * Computes the variance of pixel intensity values for a GrayU8 image represented by the given histogram . * @ param histogram Histogram with N bins * @ param mean Mean of the image . * @ param N number of bins in the histogram . * @ return variance */ public static double variance ( int [ ] histogram , double mean , int N ) { } }
// Delegates to the four-argument overload, computing the total pixel count
// from the histogram itself so callers need not supply it.
return variance ( histogram , mean , count ( histogram , N ) , N ) ;
public class HistoryFilterPlusDialog { /** * This method initializes jPanel * @ return javax . swing . JPanel */ private JPanel getJPanel ( ) { } }
// Lazily build and cache the panel; subsequent calls return the same instance.
if ( jPanel == null ) {
    GridBagConstraints gridBagConstraints12 = new GridBagConstraints ( ) ;
    java . awt . GridBagConstraints gridBagConstraints11 = new GridBagConstraints ( ) ;
    java . awt . GridBagConstraints gridBagConstraints6 = new GridBagConstraints ( ) ;
    ZapLabel descLabel = new ZapLabel ( ) ;
    descLabel . setLineWrap ( true ) ;
    descLabel . setWrapStyleWord ( true ) ;
    descLabel . setText ( Constant . messages . getString ( "history.filter.label.desc" ) ) ;
    jPanel = new JPanel ( ) ;
    jPanel . setLayout ( new GridBagLayout ( ) ) ;
    // Constraints for the bottom button panel (row 3).
    gridBagConstraints6 . gridwidth = 3 ;
    gridBagConstraints6 . gridx = 0 ;
    gridBagConstraints6 . gridy = 3 ;
    gridBagConstraints6 . insets = new java . awt . Insets ( 5 , 2 , 5 , 2 ) ;
    gridBagConstraints6 . ipadx = 3 ;
    gridBagConstraints6 . ipady = 3 ;
    // Constraints for the description label (row 0, stretches horizontally).
    gridBagConstraints11 . gridx = 0 ;
    gridBagConstraints11 . gridy = 0 ;
    gridBagConstraints11 . insets = new java . awt . Insets ( 5 , 10 , 5 , 10 ) ;
    gridBagConstraints11 . weightx = 1.0D ;
    gridBagConstraints11 . gridwidth = 3 ;
    gridBagConstraints11 . anchor = java . awt . GridBagConstraints . WEST ;
    gridBagConstraints11 . fill = java . awt . GridBagConstraints . HORIZONTAL ;
    gridBagConstraints11 . ipadx = 3 ;
    gridBagConstraints11 . ipady = 3 ;
    // Constraints for the main filter panel (row 2, takes all remaining space).
    gridBagConstraints12 . gridx = 0 ;
    gridBagConstraints12 . weighty = 1.0D ;
    gridBagConstraints12 . gridwidth = 3 ;
    gridBagConstraints12 . gridy = 2 ;
    gridBagConstraints12 . fill = java . awt . GridBagConstraints . BOTH ;
    gridBagConstraints12 . insets = new java . awt . Insets ( 2 , 10 , 2 , 10 ) ;
    gridBagConstraints12 . ipadx = 0 ;
    gridBagConstraints12 . ipady = 1 ;
    jPanel . add ( descLabel , gridBagConstraints11 ) ;
    jPanel . add ( getJPanel2 ( ) , gridBagConstraints12 ) ;
    jPanel . add ( getJPanel1 ( ) , gridBagConstraints6 ) ;
}
return jPanel ;
public class ZealotConfigManager { /** * Initializes and loads the Zealot configuration into the cache . * @ param zealotConfig the configuration instance * @ param xmlLocations locations of Zealot XML files , multiple entries separated by commas * @ param handlerLocations locations of custom Zealot handler classes , multiple entries separated by commas */ public void initLoad ( AbstractZealotConfig zealotConfig , String xmlLocations , String handlerLocations ) { } }
// Record the locations first; the single-argument overload reads these fields.
this . xmlLocations = xmlLocations ;
this . handlerLocations = handlerLocations ;
this . initLoad ( zealotConfig ) ;
public class RobustLoaderWriterResilienceStrategy { /** * Get all entries for the provided keys . Entries not found by the loader - writer are expected to be an entry * with the key and a null value . * @ param keys the keys being retrieved * @ param e the triggered failure * @ return a map of key - value pairs as loaded by the loader - writer */ @ SuppressWarnings ( "unchecked" ) @ Override public Map < K , V > getAllFailure ( Iterable < ? extends K > keys , StoreAccessException e ) { } }
try {
    // Delegate the whole batch to the loader-writer; per the contract, keys it
    // cannot find come back mapped to null.
    return loaderWriter . loadAll ( ( Iterable ) keys ) ; // FIXME : bad typing that we should fix
} catch ( BulkCacheLoadingException e1 ) {
    // Already the exception type callers expect — propagate as-is.
    throw e1 ;
} catch ( Exception e1 ) {
    // Wrap any other loader failure, preserving the original store failure as context.
    throw ExceptionFactory . newCacheLoadingException ( e1 , e ) ;
} finally {
    // Always clean up the store state associated with the failed keys.
    cleanup ( keys , e ) ;
}
public class ThreadPoolTaskScheduler { /** * { @ inheritDoc } * @ see org . audit4j . core . schedule . AsyncTaskExecutor # submit ( java . lang . Runnable ) */ @ Override public Future < ? > submit ( Runnable task ) { } }
ExecutorService executor = getScheduledExecutor ( ) ; try { return executor . submit ( errorHandlingTask ( task , false ) ) ; } catch ( RejectedExecutionException ex ) { throw new TaskRejectedException ( "Executor [" + executor + "] did not accept task: " + task , ex ) ; }
public class VortexWorker { /** * Starts the scheduler and executor and waits until termination . */ @ Override public byte [ ] call ( final byte [ ] memento ) throws Exception { } }
final ExecutorService schedulerThread = Executors . newSingleThreadExecutor ( ) ;
final ExecutorService commandExecutor = Executors . newFixedThreadPool ( numOfThreads ) ;
// Tasklet id -> running Future, used to honor cancellation requests.
final ConcurrentMap < Integer , Future > futures = new ConcurrentHashMap < > ( ) ;
// Scheduling thread starts
schedulerThread . execute ( new Runnable ( ) {
    @ SuppressWarnings ( "InfiniteLoopStatement" ) // Scheduler is supposed to run forever .
    @ Override
    public void run ( ) {
        while ( true ) {
            // Scheduler Thread : Pick a command to execute ( For now , simple FIFO order )
            final byte [ ] message ;
            try {
                message = pendingRequests . takeFirst ( ) ;
            } catch ( InterruptedException e ) {
                throw new RuntimeException ( e ) ;
            }
            // Command Executor : Deserialize the command
            final MasterToWorkerRequest masterToWorkerRequest = ( MasterToWorkerRequest ) kryoUtils . deserialize ( message ) ;
            switch ( masterToWorkerRequest . getType ( ) ) {
            case AggregateTasklets :
                // Register a new aggregate function container keyed by its id.
                final TaskletAggregationRequest taskletAggregationRequest = ( TaskletAggregationRequest ) masterToWorkerRequest ;
                aggregates . put ( taskletAggregationRequest . getAggregateFunctionId ( ) , new AggregateContainer ( heartBeatTriggerManager , kryoUtils , workerReports , taskletAggregationRequest ) ) ;
                break ;
            case ExecuteAggregateTasklet :
                executeAggregateTasklet ( commandExecutor , masterToWorkerRequest ) ;
                break ;
            case ExecuteTasklet :
                executeTasklet ( commandExecutor , futures , masterToWorkerRequest ) ;
                break ;
            case CancelTasklet :
                final TaskletCancellationRequest cancellationRequest = ( TaskletCancellationRequest ) masterToWorkerRequest ;
                LOG . log ( Level . FINE , "Cancelling Tasklet with ID {0}." , cancellationRequest . getTaskletId ( ) ) ;
                final Future future = futures . get ( cancellationRequest . getTaskletId ( ) ) ;
                if ( future != null ) {
                    future . cancel ( true ) ;
                }
                break ;
            default :
                throw new RuntimeException ( "Unknown Command" ) ;
            }
        }
    }
} ) ;
// Block until the driver signals termination; the scheduler keeps running meanwhile.
terminated . await ( ) ;
return null ;
public class DSClientFactory { /** * Gets the host filter predicate . * @ param hostFilterPolicy * the host filter policy * @ return the host filter predicate */ private Predicate < com . datastax . driver . core . Host > getHostFilterPredicate ( String hostFilterPolicy ) { } }
Predicate < com . datastax . driver . core . Host > predicate = null ;
Method getter = null ;
Class < ? > hostFilterClazz = null ;
try {
    // Load the user-configured policy class and call its public getInstance()
    // on a fresh instance to obtain the predicate.
    hostFilterClazz = Class . forName ( hostFilterPolicy ) ;
    getter = hostFilterClazz . getDeclaredMethod ( GET_INSTANCE ) ;
    // NOTE(review): unchecked cast — getInstance() is assumed to return a
    // Predicate<Host>; verify implementations.
    predicate = ( Predicate < com . datastax . driver . core . Host > ) getter . invoke ( KunderaCoreUtils . createNewInstance ( hostFilterClazz ) ) ;
} catch ( ClassNotFoundException e ) {
    logger . error ( e . getMessage ( ) ) ;
    throw new KunderaException ( "Please make sure class " + hostFilterPolicy + " set in property file exists in classpath " + e . getMessage ( ) ) ;
} catch ( IllegalAccessException e ) {
    logger . error ( e . getMessage ( ) ) ;
    throw new KunderaException ( "Method " + getter . getName ( ) + " must be declared public " + e . getMessage ( ) ) ;
} catch ( NoSuchMethodException e ) {
    logger . error ( e . getMessage ( ) ) ;
    throw new KunderaException ( "Please make sure getter method of " + hostFilterClazz . getSimpleName ( ) + " is named \"getInstance()\"" ) ;
} catch ( InvocationTargetException e ) {
    logger . error ( e . getMessage ( ) ) ;
    throw new KunderaException ( "Error while executing \"getInstance()\" method of Class " + hostFilterClazz . getSimpleName ( ) + ": " + e . getMessage ( ) ) ;
} catch ( SecurityException e ) {
    logger . error ( e . getMessage ( ) ) ;
    throw new KunderaException ( "Encountered security exception while accessing the method: " + "\"getInstance()\"" + e . getMessage ( ) ) ;
}
return predicate ;
public class ByteIntervalProperty { /** * Utility method to include the SI unit name . * @ param value The value of a Long in String form . * @ return Rounded value with appended SI units . For example , 45.3MB , 62B , 27.2GB , etc . */ public String addUnits ( String value ) { } }
StringBuffer buff = new StringBuffer ( 100 ) ; long bytes = Long . parseLong ( value ) ; if ( bytes < 1000000 ) { buff . append ( value ) ; buff . append ( "B" ) ; } else { int unit = 1000 ; int exp = ( int ) ( Math . log ( bytes ) / Math . log ( unit ) ) ; String pre = "kMGTPE" . charAt ( exp - 1 ) + "" ; String ov = String . format ( "%.1f%sB" , bytes / Math . pow ( unit , exp ) , pre ) ; buff . append ( ov ) ; } return buff . toString ( ) ;