signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class DependencyList {
    /**
     * Internal method called to resolve dependencies the first time one of the
     * accessor methods is called. This is done in order to make object creation
     * light-weight and to avoid the possibility of throwing an exception in the
     * object constructor.
     *
     * @throws IOException if an error occurs resolving the dependencies
     */
    synchronized void initialize() throws IOException {
        if (initialized) {
            return;
        }
        final boolean traceLogging = log.isLoggable(Level.FINEST);
        final boolean entryExitLogging = log.isLoggable(Level.FINER);
        final String methodName = "initialize"; //$NON-NLS-1$
        if (entryExitLogging) {
            log.entering(DependencyList.class.getName(), methodName);
        }
        // A call to getDelcaredDependencies (sic - the typo is part of the API name) is
        // made to ensure that the time stamp used to mark the beginning of finding the
        // expanded dependencies is taken only after forming the dependency map completes.
        aggr.getDependencies().getDelcaredDependencies("require"); //$NON-NLS-1$
        long stamp = aggr.getDependencies().getLastModified(); // save time stamp
        try {
            explicitDeps = new ModuleDeps();
            expandedDeps = new ModuleDeps();
            if (traceLogging) {
                log.finest("dependent features = " + dependentFeatures); //$NON-NLS-1$
            }
            // Resolve each requested name into the explicit dependency map.
            for (String name : names) {
                processDep(name, explicitDeps, null, new HashSet<String>(), null);
            }
            // Now expand the explicit dependencies.
            resolveAliases = true;
            for (Map.Entry<String, ModuleDepInfo> entry : explicitDeps.entrySet()) {
                expandDependencies(entry.getKey(), entry.getValue(), expandedDeps);
            }
            expandedDeps.keySet().removeAll(IDependencies.excludes);
            // Resolve feature conditionals based on the specified feature set. This is
            // necessary because we don't specify features when doing has! plugin branching
            // so that dependent features that are discovered by has! plugin branching don't
            // vary based on the specified features.
            explicitDeps.resolveWith(features, coerceUndefinedToFalse);
            expandedDeps.resolveWith(features, coerceUndefinedToFalse);
            if (traceLogging) {
                log.finest("explicitDeps after applying features: " + explicitDeps); //$NON-NLS-1$
                log.finest("expandedDeps after applying features: " + expandedDeps); //$NON-NLS-1$
            }
            if (stamp != aggr.getDependencies().getLastModified()) {
                // If the time stamp has changed, dependencies have been updated while we
                // were processing them. Throw an exception to avoid caching the response
                // with possibly corrupt dependency info.
                throw new IllegalStateException("" + stamp + "!=" + aggr.getDependencies().getLastModified()); //$NON-NLS-1$ //$NON-NLS-2$
            }
        } finally {
            // NOTE(review): initialized is set even when an exception was thrown above,
            // so a failed initialization will not be retried on the next call — confirm
            // this is intentional.
            initialized = true;
        }
        if (entryExitLogging) {
            log.exiting(DependencyList.class.getName(), methodName);
        }
    }
}
public class Hdf5Archive { /** * Read JSON - formatted string attribute from group path .
* @ param attributeName Name of attribute
* @ param groups Array of zero or more ancestor groups from root to parent .
* @ return HDF5 attribute as JSON
* @ throws UnsupportedKerasConfigurationException Unsupported Keras config */
public String readAttributeAsJson ( String attributeName , String ... groups ) throws UnsupportedKerasConfigurationException { } } | synchronized ( Hdf5Archive . LOCK_OBJECT ) { if ( groups . length == 0 ) { Attribute a = this . file . openAttribute ( attributeName ) ; String s = readAttributeAsJson ( a ) ; a . deallocate ( ) ; return s ; } Group [ ] groupArray = openGroups ( groups ) ; Attribute a = groupArray [ groups . length - 1 ] . openAttribute ( attributeName ) ; String s = readAttributeAsJson ( a ) ; a . deallocate ( ) ; closeGroups ( groupArray ) ; return s ; } |
public class JobExecutionStatusDetails { /** * The job execution status .
* @ param detailsMap
* The job execution status .
* @ return Returns a reference to this object so that method calls can be chained together . */
public JobExecutionStatusDetails withDetailsMap ( java . util . Map < String , String > detailsMap ) { } } | setDetailsMap ( detailsMap ) ; return this ; |
public class AbstractJtsCodec { /** * LinearRing - - - */
protected com . vividsolutions . jts . geom . LinearRing toJtsLinearRing ( LinearRing src ) { } } | return this . geometryFactory . createLinearRing ( StreamSupport . stream ( src . positions ( ) . children ( ) . spliterator ( ) , false ) . map ( sp -> new Coordinate ( sp . lon ( ) , sp . lat ( ) , sp . alt ( ) ) ) . toArray ( Coordinate [ ] :: new ) ) ; |
public class DefaultVarExploder { /** * Scans the fields on the class or super classes to look for
* field - level annotations .
* @ param c */
private void scanFields ( Class < ? > c ) { } } | if ( ! c . isInterface ( ) ) { Field [ ] fields = c . getDeclaredFields ( ) ; for ( Field field : fields ) { String fieldName = field . getName ( ) ; if ( pairs . containsKey ( fieldName ) ) { if ( field . isAnnotationPresent ( UriTransient . class ) ) { pairs . remove ( fieldName ) ; } else if ( field . isAnnotationPresent ( VarName . class ) ) { String name = field . getAnnotation ( VarName . class ) . value ( ) ; pairs . put ( name , pairs . get ( fieldName ) ) ; pairs . remove ( fieldName ) ; } } } } /* * We still need to scan the fields of the super class if its
* not Object to check for annotations . There might be a better
* way to do this . */
if ( ! c . getSuperclass ( ) . equals ( Object . class ) ) { scanFields ( c . getSuperclass ( ) ) ; } |
public class CatalogMap {
    /**
     * Get an item from the map by name.
     *
     * @param name The name of the requested CatalogType instance in the map
     * @return The item found in the map, or null if not found
     */
    public T get(String name) {
        if (m_items == null) {
            return null;
        }
        // Lookups are case-insensitive: keys are presumably stored upper-cased.
        // NOTE(review): toUpperCase() is locale-sensitive (e.g. Turkish dotless i);
        // if keys are ASCII identifiers, toUpperCase(Locale.ROOT) would be safer —
        // but it must match however put() normalizes keys, which is not visible here.
        return m_items.get(name.toUpperCase());
    }
}
public class HierarchyEntityUtils { /** * 得到给定节点的所有家族结点 , 包括自身
* @ param root 指定根节点
* @ return 包含自身的家族节点集合
* @ param < T > a T object . */
public static < T extends HierarchyEntity < T , ? > > Set < T > getFamily ( T root ) { } } | Set < T > nodes = CollectUtils . newHashSet ( ) ; nodes . add ( root ) ; loadChildren ( root , nodes ) ; return nodes ; |
public class Serialiser { /** * Deserialises the given { @ link InputStream } to a JSON String .
* @ param input The stream to deserialise .
* @ param type The message type to deserialise into .
* @ param < O > The type to deserialise to .
* @ return A new instance of the given type .
* @ throws IOException If an error occurs in reading from the input stream . */
public static < O > O deserialise ( InputStream input , Type type ) throws IOException { } } | Gson gson = getBuilder ( ) . create ( ) ; try ( Reader reader = DebugReader . newInstance ( input ) ) { return gson . fromJson ( reader , type ) ; } |
public class DockerVolumeConfiguration { /** * A map of Docker driver - specific options passed through . This parameter maps to < code > DriverOpts < / code > in the < a
* href = " https : / / docs . docker . com / engine / api / v1.35 / # operation / VolumeCreate " > Create a volume < / a > section of the < a
* href = " https : / / docs . docker . com / engine / api / v1.35 / " > Docker Remote API < / a > and the < code > xxopt < / code > option to < a
* href = " https : / / docs . docker . com / engine / reference / commandline / volume _ create / " > < code > docker volume create < / code >
* @ param driverOpts
* A map of Docker driver - specific options passed through . This parameter maps to < code > DriverOpts < / code > in
* the < a href = " https : / / docs . docker . com / engine / api / v1.35 / # operation / VolumeCreate " > Create a volume < / a > section
* of the < a href = " https : / / docs . docker . com / engine / api / v1.35 / " > Docker Remote API < / a > and the
* < code > xxopt < / code > option to < a
* href = " https : / / docs . docker . com / engine / reference / commandline / volume _ create / " >
* < code > docker volume create < / code > < / a > .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DockerVolumeConfiguration withDriverOpts ( java . util . Map < String , String > driverOpts ) { } } | setDriverOpts ( driverOpts ) ; return this ; |
public class EnvironmentClassLoader {
    /**
     * Stops the environment, closing down any resources.
     * The resources are closed in the reverse order that they're started.
     */
    @Override
    public void stop() {
        // Lifecycle gate: bail out if we are not transitioning to the stopping state
        // (e.g. already stopped or a concurrent stop is in progress).
        if (!getLifecycle().toStop()) {
            return;
        }
        ArrayList<EnvLoaderListener> listeners = getEnvironmentListeners();
        Thread thread = Thread.currentThread();
        ClassLoader oldLoader = thread.getContextClassLoader();
        // Listeners run with this environment loader as the context class loader.
        thread.setContextClassLoader(this);
        try {
            // Closing down in reverse of registration order.
            if (listeners != null) {
                for (int i = listeners.size() - 1; i >= 0; i--) {
                    EnvLoaderListener listener = listeners.get(i);
                    try {
                        listener.environmentStop(this);
                    } catch (Throwable e) {
                        // A misbehaving listener must not prevent the rest of the
                        // shutdown; log and keep going.
                        log().log(Level.WARNING, e.toString(), e);
                    }
                }
            }
            super.stop();
        } finally {
            // Always restore the caller's context class loader.
            thread.setContextClassLoader(oldLoader);
            // drain the thread pool for GC
            // XXX: ExecutorThreadPoolBaratine.getThreadPool().stopEnvironment(this);
        }
    }
}
public class AbstractAzkabanServlet {
    /**
     * Retrieves the error message from the {@code azkaban.failure.message} cookie,
     * or null if the cookie is not present.
     * (The previous javadoc said "success message", which contradicted the code —
     * it reads AZKABAN_FAILURE_MESSAGE.)
     */
    protected String getErrorMessageFromCookie(final HttpServletRequest request) {
        final Cookie cookie = getCookieByName(request, AZKABAN_FAILURE_MESSAGE);
        if (cookie == null) {
            return null;
        }
        return cookie.getValue();
    }
}
public class MutationVerbHandler {
    /**
     * Forwards a mutation to the other local recipients encoded in the forward
     * list. Older versions (&lt; 1.0) will not send this message at all, hence we
     * don't need to check the version of the data.
     *
     * @param mutation     the mutation to forward
     * @param verb         the messaging verb to forward it under
     * @param forwardBytes serialized forward list: an int count followed by
     *                     (endpoint, message id) pairs
     * @param from         the original sender, propagated so recipients ack to it
     * @throws IOException if the forward list cannot be deserialized
     */
    private void forwardToLocalNodes(Mutation mutation, MessagingService.Verb verb, byte[] forwardBytes, InetAddress from) throws IOException {
        // Byte-array-backed stream: no OS resources held, so no close() is needed.
        DataInputStream in = new DataInputStream(new FastByteArrayInputStream(forwardBytes));
        int size = in.readInt();
        // Tell the recipients who to send their ack to.
        MessageOut<Mutation> message = new MessageOut<>(verb, mutation, Mutation.serializer).withParameter(Mutation.FORWARD_FROM, from.getAddress());
        // Send a message to each of the addresses on our forward list.
        for (int i = 0; i < size; i++) {
            InetAddress address = CompactEndpointSerializationHelper.deserialize(in);
            int id = in.readInt();
            Tracing.trace("Enqueuing forwarded write to {}", address);
            MessagingService.instance().sendOneWay(message, id, address);
        }
    }
}
public class GraphSerialization { /** * Deserializes a { @ link SerializableDirectedGraph } object that is stored in the
* given location . This method returns the { @ link DirectedGraph } object , that is wrapped
* in the { @ link SerializableDirectedGraph } .
* @ param location Must not be { @ code null } and a valid file path .
* @ return The { @ link DirectedGraph } object , that is wrapped in the
* { @ link SerializableDirectedGraph } .
* @ throws IOException Thrown if errors occurred on the IO level .
* @ throws ClassNotFoundException Thrown if a class could not be find while deserialization . */
public static DirectedGraph < Integer , DefaultEdge > loadGraph ( String location ) throws IOException , ClassNotFoundException { } } | File file = new File ( location ) ; if ( ! file . canWrite ( ) ) { throw new IOException ( "Cannot read from file " + location ) ; } return GraphSerialization . loadGraph ( file ) ; |
public class Matrix4x3f { /** * / * ( non - Javadoc )
* @ see org . joml . Matrix4x3fc # invert ( org . joml . Matrix4x3f ) */
public Matrix4x3f invert ( Matrix4x3f dest ) { } } | if ( ( properties & PROPERTY_IDENTITY ) != 0 ) return dest . identity ( ) ; else if ( ( properties & PROPERTY_ORTHONORMAL ) != 0 ) return invertOrthonormal ( dest ) ; return invertGeneric ( dest ) ; |
public class SerializationClassNameFilter {
    /**
     * Throws {@link SecurityException} if the given class name appears on the
     * blacklist or does not appear on a whitelist.
     *
     * @param className class name to check
     * @throws SecurityException if the class name is not allowed for deserialization
     */
    public void filter(String className) throws SecurityException {
        // Blacklist always wins, regardless of whitelist configuration.
        if (blacklist.isListed(className)) {
            throw new SecurityException(format(DESERIALIZATION_ERROR, className));
        }
        // If whitelisting is enabled (either an explicit whitelist or the default
        // whitelist), force the whitelist check. Note: when neither whitelist is
        // active, this block is skipped and the name is allowed through on the
        // strength of the blacklist check alone.
        if (useDefaultWhitelist || !whitelist.isEmpty()) {
            if (whitelist.isListed(className) || (useDefaultWhitelist && DEFAULT_WHITELIST.isListed(className))) {
                return;
            }
            throw new SecurityException(format(DESERIALIZATION_ERROR, className));
        }
    }
}
public class GetStagesRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetStagesRequest getStagesRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getStagesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getStagesRequest . getRestApiId ( ) , RESTAPIID_BINDING ) ; protocolMarshaller . marshall ( getStagesRequest . getDeploymentId ( ) , DEPLOYMENTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class CampaignManager { /** * Read the xml campaign file
* @ param fileName the xml campaign file
* @ return an object describing the campaign
* @ throws java . lang . Exception */
public Campaign readFile ( String fileName ) throws Exception { } } | Campaign result = new Campaign ( ) ; DocumentBuilderFactory dbf = DocumentBuilderFactory . newInstance ( ) ; DocumentBuilder db = dbf . newDocumentBuilder ( ) ; Document doc = db . parse ( fileName ) ; doc . getDocumentElement ( ) . normalize ( ) ; Element el = doc . getDocumentElement ( ) ; if ( ! el . getNodeName ( ) . equals ( "campaign" ) ) { throw new Exception ( fileName + " is not a valid xml campain file" ) ; } result . name = el . getAttributeNode ( "name" ) . getValue ( ) ; NodeList nodeLst = doc . getElementsByTagName ( "run" ) ; for ( int s = 0 ; s < nodeLst . getLength ( ) ; s ++ ) { Node node = nodeLst . item ( s ) ; if ( node . getNodeType ( ) == Node . ELEMENT_NODE ) { Element element = ( Element ) node ; CampaignRun run = new CampaignRun ( ) ; run . testbed = element . getAttribute ( "testbed" ) ; result . runs . add ( run ) ; NodeList nodeList = element . getElementsByTagName ( "testsuite" ) ; for ( int t = 0 ; t < nodeList . getLength ( ) ; t ++ ) { TestSuiteParams params = new TestSuiteParams ( ) ; run . testsuites . add ( params ) ; params . setDirectory ( nodeList . item ( t ) . getAttributes ( ) . getNamedItem ( "directory" ) . getNodeValue ( ) ) ; NodeList childList = nodeList . item ( t ) . getChildNodes ( ) ; for ( int c = 0 ; c < childList . getLength ( ) ; c ++ ) { Node childNode = childList . item ( c ) ; if ( childNode . getNodeName ( ) . equals ( "testdata" ) ) { String selectorStr = childNode . getAttributes ( ) . getNamedItem ( "selector" ) . getNodeValue ( ) ; String [ ] selectedRowsStr = selectorStr . split ( "," ) ; TreeSet < Integer > selectedRows = new TreeSet < > ( ) ; for ( String selectedRowStr : selectedRowsStr ) { selectedRows . add ( Integer . parseInt ( selectedRowStr ) ) ; } params . setDataRows ( selectedRows ) ; } if ( childList . item ( c ) . getNodeName ( ) . equals ( "loopInHours" ) ) { params . setLoopInHours ( true ) ; } if ( childList . 
item ( c ) . getNodeName ( ) . equals ( "count" ) ) { try { params . setCount ( Integer . parseInt ( childList . item ( c ) . getTextContent ( ) ) ) ; } catch ( NumberFormatException e ) { logger . error ( "count field in " + fileName + " file should be numeric" ) ; } } } } } } return result ; |
public class DescribeInstancesRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < DescribeInstancesRequest > getDryRunRequest ( ) { } } | Request < DescribeInstancesRequest > request = new DescribeInstancesRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
public class JFinal {
    /**
     * Starts the project from a {@code main} method (for example when running
     * inside Eclipse); supports hot reloading.
     * (Translated from the original Chinese comment.)
     */
    public static void start(String webAppDir, int port, String context, int scanIntervalSeconds) {
        server = com.jfinal.server.jetty.ServerFactory.getServer(webAppDir, port, context, scanIntervalSeconds);
        server.start();
    }
}
public class TurfConversion { /** * Converts a distance to a different unit specified .
* @ param distance the distance to be converted
* @ param originalUnit of the distance , must be one of the units defined in
* { @ link TurfUnitCriteria }
* @ param finalUnit returned unit , { @ link TurfConstants # UNIT _ DEFAULT } if not specified
* @ return the converted distance
* @ since 2.2.0 */
public static double convertLength ( @ FloatRange ( from = 0 ) double distance , @ NonNull @ TurfUnitCriteria String originalUnit , @ Nullable @ TurfUnitCriteria String finalUnit ) { } } | if ( finalUnit == null ) { finalUnit = TurfConstants . UNIT_DEFAULT ; } return radiansToLength ( lengthToRadians ( distance , originalUnit ) , finalUnit ) ; |
public class Option { /** * Open model file .
* @ return the buffered reader */
public BufferedReader openModelFile ( ) { } } | String filename = modelDir + File . separator + modelFile ; BufferedReader fin = null ; try { fin = new BufferedReader ( new InputStreamReader ( new FileInputStream ( filename ) , "UTF-8" ) ) ; } catch ( IOException e ) { System . out . println ( e . toString ( ) ) ; return null ; } return fin ; |
public class MtasFieldsConsumer {
    /**
     * Merges the fields of all segments being merged into a single mapped view
     * and writes it out through {@code write}.
     *
     * @see org.apache.lucene.codecs.FieldsConsumer#merge(org.apache.lucene.index.MergeState)
     */
    @Override
    public void merge(MergeState mergeState) throws IOException {
        final List<Fields> fields = new ArrayList<>();
        final List<ReaderSlice> slices = new ArrayList<>();
        // docBase accumulates the doc-id offset of each reader within the merged
        // segment; the per-reader slice records (base, maxDoc, readerIndex).
        int docBase = 0;
        for (int readerIndex = 0; readerIndex < mergeState.fieldsProducers.length; readerIndex++) {
            final FieldsProducer f = mergeState.fieldsProducers[readerIndex];
            final int maxDoc = mergeState.maxDocs[readerIndex];
            // Verify the source segment before merging it in.
            f.checkIntegrity();
            slices.add(new ReaderSlice(docBase, maxDoc, readerIndex));
            fields.add(f);
            docBase += maxDoc;
        }
        // MappedMultiFields remaps the per-segment doc ids into the merged doc-id space.
        Fields mergedFields = new MappedMultiFields(mergeState, new MultiFields(fields.toArray(Fields.EMPTY_ARRAY), slices.toArray(ReaderSlice.EMPTY_ARRAY)));
        write(mergedFields);
    }
}
public class JobStorageDatabaseErrorHandler { /** * / * package */
void deleteDatabaseFile ( String fileName ) { } } | if ( fileName . equalsIgnoreCase ( ":memory:" ) || fileName . trim ( ) . length ( ) == 0 ) { return ; } CAT . e ( "deleting the database file: " + fileName ) ; try { File databaseFile = new File ( fileName ) ; if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . JELLY_BEAN ) { deleteApi16 ( databaseFile ) ; } else { deleteApi14 ( JobManager . instance ( ) . getContext ( ) , databaseFile ) ; } } catch ( Exception e ) { /* print warning and ignore exception */
CAT . w ( e , "delete failed: " + e . getMessage ( ) ) ; } |
public class ServerImpl {
    /**
     * Adds a member to the server: caches the user, its nickname, mute/deafen
     * state, role memberships and join timestamp, and fires the server-ready
     * consumers once all expected members have been seen.
     *
     * @param member the raw member JSON node to add
     */
    public void addMember(JsonNode member) {
        User user = api.getOrCreateUser(member.get("user"));
        members.put(user.getId(), user);
        if (member.hasNonNull("nick")) {
            nicknames.put(user.getId(), member.get("nick").asText());
        }
        if (member.hasNonNull("mute")) {
            setMuted(user.getId(), member.get("mute").asBoolean());
        }
        if (member.hasNonNull("deaf")) {
            setDeafened(user.getId(), member.get("deaf").asBoolean());
        }
        // Register the user with every role listed for it; unknown role ids are
        // silently skipped (Optional is empty).
        for (JsonNode roleIds : member.get("roles")) {
            long roleId = Long.parseLong(roleIds.asText());
            getRoleById(roleId).map(role -> ((RoleImpl) role)).ifPresent(role -> role.addUserToCache(user));
        }
        joinedAtTimestamps.put(user.getId(), OffsetDateTime.parse(member.get("joined_at").asText()).toInstant());
        // Ready latch: once the member cache reaches the announced member count,
        // flip to ready exactly once and drain the queued consumers.
        synchronized (readyConsumers) {
            if (!ready && members.size() == getMemberCount()) {
                ready = true;
                readyConsumers.forEach(consumer -> consumer.accept(this));
                readyConsumers.clear();
            }
        }
    }
}
public class CoreRemoteMongoCollectionImpl { /** * Update all documents in the collection according to the specified arguments .
* @ param filter a document describing the query filter , which may not be null .
* @ param update a document describing the update , which may not be null . The update to apply
* must include only update operators .
* @ return the result of the update many operation */
public RemoteUpdateResult updateMany ( final Bson filter , final Bson update ) { } } | return updateMany ( filter , update , new RemoteUpdateOptions ( ) ) ; |
public class AbstractAmazonDynamoDBAsync { /** * Simplified method form for invoking the GetItem operation with an AsyncHandler .
* @ see # getItemAsync ( GetItemRequest , com . amazonaws . handlers . AsyncHandler ) */
@ Override public java . util . concurrent . Future < GetItemResult > getItemAsync ( String tableName , java . util . Map < String , AttributeValue > key , com . amazonaws . handlers . AsyncHandler < GetItemRequest , GetItemResult > asyncHandler ) { } } | return getItemAsync ( new GetItemRequest ( ) . withTableName ( tableName ) . withKey ( key ) , asyncHandler ) ; |
public class Ifc4PackageImpl {
    /**
     * Returns the EClass for IfcTimeSeriesValue, lazily resolved from the
     * registered Ifc4 EPackage on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcTimeSeriesValue() {
        // Lazy lookup by generated classifier index (724) in the registered package.
        // NOTE(review): not synchronized — presumably safe because concurrent
        // initializations resolve to the same registry object; confirm against the
        // EMF generator's threading assumptions.
        if (ifcTimeSeriesValueEClass == null) {
            ifcTimeSeriesValueEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(724);
        }
        return ifcTimeSeriesValueEClass;
    }
}
public class CombinedRegistry { /** * Looks up from all encapsulated Registry ( s ) one by one , and returns the first result found . < br >
* 按次序从所封装的Registry中查找 , 返回第一个找到的结果 。
* If no result can be found , null will be returned .
* 如果全都找不到 , 则返回null 。
* @ see org . apache . camel . spi . Registry # lookup ( java . lang . String , java . lang . Class ) */
public < T > T lookup ( String name , Class < T > type ) { } } | T result = null ; for ( Registry reg : registryList ) { result = reg . lookup ( name , type ) ; if ( result != null ) { break ; } } return result ; |
public class Query { /** * The WHERE clause .
* If there is no where clause already defined , sets to the defined value .
* If there is already an expression for where defined , defines an < code > and < / code > expression between old and new clauses .
* Example :
* < pre >
* query . where ( eq ( column ( " col1 " ) , k ( 1 ) ) . andWhere ( eq ( column ( " col2 " ) , k ( 2 ) )
* will be translated to
* ( " col1 " = 1 ) AND ( " col2 " = 2)
* < / pre >
* @ param where The WHERE expression .
* @ return This expression . */
public Query andWhere ( final Expression where ) { } } | if ( this . where == null ) { this . where = where ; } else { this . where = and ( this . where . enclose ( ) , where . enclose ( ) ) ; } return this ; |
public class JsHdrsImpl {
    /**
     * Sets the value of the CurrentMEArrivalTimestamp field in the message header.
     * Javadoc description supplied by JsMessage interface.
     *
     * @param value the arrival timestamp to store
     */
    public final void setCurrentMEArrivalTimestamp(long value) {
        // Entry/exit trace points bracket the single field write.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "setCurrentMEArrivalTimestamp", Long.valueOf(value));
        jmo.setLongField(JsHdrAccess.ARRIVALTIMESTAMP, value);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "setCurrentMEArrivalTimestamp");
    }
}
public class CreateAssociationBatchRequestEntryMarshaller {
    /**
     * Marshalls the given entry onto the protocol marshaller, one binding per
     * entry property.
     *
     * @param createAssociationBatchRequestEntry the entry to marshall; must not be null
     * @param protocolMarshaller                 the marshaller to write the bindings to
     */
    public void marshall(CreateAssociationBatchRequestEntry createAssociationBatchRequestEntry, ProtocolMarshaller protocolMarshaller) {
        if (createAssociationBatchRequestEntry == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getName(), NAME_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getInstanceId(), INSTANCEID_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getParameters(), PARAMETERS_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getAutomationTargetParameterName(), AUTOMATIONTARGETPARAMETERNAME_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getDocumentVersion(), DOCUMENTVERSION_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getTargets(), TARGETS_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getScheduleExpression(), SCHEDULEEXPRESSION_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getOutputLocation(), OUTPUTLOCATION_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getAssociationName(), ASSOCIATIONNAME_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getMaxErrors(), MAXERRORS_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getMaxConcurrency(), MAXCONCURRENCY_BINDING);
            protocolMarshaller.marshall(createAssociationBatchRequestEntry.getComplianceSeverity(), COMPLIANCESEVERITY_BINDING);
        } catch (Exception e) {
            // Any protocol-layer failure is surfaced as an SDK client exception.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class UndertowConfiguration {
    /**
     * Initializes the various lookup maps that speed up access to different
     * configuration parts by identifier (socket bindings, security realms,
     * interfaces, file handlers, filters).
     */
    public synchronized void init() {
        for (SocketBinding binding : socketBindings) {
            socketBindingsMap.put(binding.getName(), binding);
        }
        for (SecurityRealm realm : securityRealms) {
            securityRealmsMap.put(realm.getName(), realm);
        }
        for (Interface iface : interfaces) {
            interfacesMap.put(iface.getName(), iface);
        }
        if (subsystem != null) {
            // NOTE(review): getFileHandlers() is iterated without a null check while
            // getFilters() is null-checked below — presumably the handler list is
            // always non-null; confirm against the subsystem model.
            for (UndertowSubsystem.FileHandler handler : subsystem.getFileHandlers()) {
                handlersMap.put(handler.getName(), handler);
            }
            if (subsystem.getFilters() != null) {
                for (UndertowSubsystem.ResponseHeaderFilter filter : subsystem.getFilters().getResponseHeaders()) {
                    filtersMap.put(filter.getName(), filter);
                }
                for (UndertowSubsystem.ErrorPageFilter filter : subsystem.getFilters().getErrorPages()) {
                    filtersMap.put(filter.getName(), filter);
                }
                for (UndertowSubsystem.CustomFilter filter : subsystem.getFilters().getCustomFilters()) {
                    filtersMap.put(filter.getName(), filter);
                }
            }
        }
    }
}
public class AxesWalker { /** * This will traverse the heararchy , calling the visitor for
* each member . If the called visitor method returns
* false , the subtree should not be called .
* @ param owner The owner of the visitor , where that path may be
* rewritten if needed .
* @ param visitor The visitor whose appropriate method will be called . */
public void callVisitors ( ExpressionOwner owner , XPathVisitor visitor ) { } } | if ( visitor . visitStep ( owner , this ) ) { callPredicateVisitors ( visitor ) ; if ( null != m_nextWalker ) { m_nextWalker . callVisitors ( this , visitor ) ; } } |
public class XMLObjectImpl {
    /**
     * Implementation of ECMAScript [[Put]] for XML objects: numeric ids are
     * routed to the index-based put, everything else becomes an XML property put.
     */
    @Override
    public final void put(Context cx, Object id, Object value) {
        if (cx == null)
            cx = Context.getCurrentContext();
        // toXMLNameOrIndex returns null when id parsed as a uint32 index, leaving
        // the index value in the context's lastUint32Result.
        XMLName xmlName = lib.toXMLNameOrIndex(cx, id);
        if (xmlName == null) {
            long index = ScriptRuntime.lastUint32Result(cx);
            // XXX Fix this cast — a uint32 index above Integer.MAX_VALUE is truncated.
            put((int) index, this, value);
            return;
        }
        putXMLProperty(xmlName, value);
    }
}
public class ExpressionUtils { /** * Create a { @ code left not in right } expression
* @ param < D > type of expressions
* @ param left lhs of expression
* @ param right rhs of expression
* @ return left not in right */
public static < D > Predicate notIn ( Expression < D > left , Collection < ? extends D > right ) { } } | if ( right . size ( ) == 1 ) { return neConst ( left , right . iterator ( ) . next ( ) ) ; } else { return predicate ( Ops . NOT_IN , left , ConstantImpl . create ( right ) ) ; } |
public class FactoryDerivativeSparse { /** * Creates a sparse three gradient operator .
* @ see boofcv . alg . filter . derivative . GradientThree
* @ param imageType The type of image which is to be processed .
* @ param border How the border should be handled . If null then the borders can ' t be processed .
* @ return Sparse gradient . */
public static < T extends ImageGray < T > , G extends GradientValue > SparseImageGradient < T , G > createThree ( Class < T > imageType , ImageBorder < T > border ) { } } | if ( imageType == GrayF32 . class ) { return ( SparseImageGradient ) new GradientSparseThree_F32 ( ( ImageBorder_F32 ) border ) ; } else if ( imageType == GrayU8 . class ) { return ( SparseImageGradient ) new GradientSparseThree_U8 ( ( ImageBorder_S32 ) border ) ; } else { throw new IllegalArgumentException ( "Unsupported image type " + imageType . getSimpleName ( ) ) ; } |
public class ClientSideMonitoringRequestHandler {
    /**
     * Collects an {@link ApiCallMonitoringEvent} for the given request:
     * operation metadata from the handler context, attempt count and latency
     * from the request metrics, and final error details from the last recorded
     * call attempt (if any).
     *
     * @param request the finished request to build the monitoring event from
     * @return the populated monitoring event
     */
    private ApiCallMonitoringEvent generateApiCallMonitoringEvent(Request<?> request) {
        String apiName = request.getHandlerContext(HandlerContextKey.OPERATION_NAME);
        String serviceId = request.getHandlerContext(HandlerContextKey.SERVICE_ID);
        String region = request.getHandlerContext(HandlerContextKey.SIGNING_REGION);
        ApiCallAttemptMonitoringEvent lastApiCallAttempt = request.getHandlerContext(LAST_CALL_ATTEMPT);
        // Latency/timestamp stay null when no latency sub-measurement was recorded.
        Long timestamp = null;
        Long latency = null;
        Integer requestCount = 0;
        AWSRequestMetrics metrics = request.getAWSRequestMetrics();
        if (metrics != null) {
            TimingInfo timingInfo = metrics.getTimingInfo();
            // Missing counter means zero attempts recorded.
            requestCount = timingInfo.getCounter(AWSRequestMetrics.Field.RequestCount.name()) == null
                    ? 0
                    : timingInfo.getCounter(AWSRequestMetrics.Field.RequestCount.name()).intValue();
            TimingInfo latencyTimingInfo = timingInfo.getSubMeasurement(AwsClientSideMonitoringMetrics.ApiCallLatency.name());
            if (latencyTimingInfo != null) {
                latency = convertToLongIfNotNull(latencyTimingInfo.getTimeTakenMillisIfKnown());
                timestamp = latencyTimingInfo.getStartEpochTimeMilliIfKnown();
            }
        }
        ApiCallMonitoringEvent event = new ApiCallMonitoringEvent()
                .withApi(apiName)
                .withVersion(VERSION)
                .withRegion(region)
                .withService(serviceId)
                .withClientId(clientId)
                .withAttemptCount(requestCount)
                .withLatency(latency)
                .withUserAgent(trimValueIfExceedsMaxLength(USER_AGENT_KEY, getDefaultUserAgent(request)))
                .withTimestamp(timestamp);
        if (lastApiCallAttempt != null) {
            // Propagate the terminal error details from the last attempt.
            event.withFinalAwsException(lastApiCallAttempt.getAwsException())
                 .withFinalAwsExceptionMessage(lastApiCallAttempt.getAwsExceptionMessage())
                 .withFinalSdkException(lastApiCallAttempt.getSdkException())
                 .withFinalSdkExceptionMessage(lastApiCallAttempt.getSdkExceptionMessage())
                 .withFinalHttpStatusCode(lastApiCallAttempt.getHttpStatusCode());
        }
        return event;
    }
}
public class DatabaseInfoScreen {
    /**
     * Set up all the screen fields.
     * Registers the default view for each DatabaseInfo field in display order;
     * each field is placed at the next logical location with the default anchor.
     */
    public void setupSFields() {
        this.getMainRecord().getField(DatabaseInfo.NAME).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        this.getMainRecord().getField(DatabaseInfo.DESCRIPTION).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        this.getMainRecord().getField(DatabaseInfo.VERSION).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        this.getMainRecord().getField(DatabaseInfo.START_ID).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        this.getMainRecord().getField(DatabaseInfo.END_ID).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        this.getMainRecord().getField(DatabaseInfo.BASE_DATABASE).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
    }
}
public class MapDictionary { /** * @ see java . util . Dictionary # put ( java . lang . Object , java . lang . Object ) */
@ Override public V put ( K key , V value ) { } } | if ( isReadyOnly ( ) ) throw new UnsupportedOperationException ( "Can't add property to read-only dictionary" ) ; return this . localMap . put ( key , value ) ; |
public class ShakeAroundAPI { /** * 摇一摇红包 - 创建红包活动
* @ param accessToken accessToken
* @ param lotteryAddLotteryInfo lotteryAddLotteryInfo
* @ return result */
public static LotteryAddLotteryInfoResult lotteryAddLotteryInfo ( String accessToken , LotteryAddLotteryInfo lotteryAddLotteryInfo ) { } } | return lotteryAddLotteryInfo ( accessToken , JsonUtil . toJSONString ( lotteryAddLotteryInfo ) ) ; |
public class CmsDateResourceComparator { /** * Creates a new instance of this comparator key . < p >
* @ param cms the current OpenCms user context
* @ param resource the resource to create the key for
* @ param dateIdentifiers the date identifiers to use for selecting the date
* @ return a new instance of this comparator key */
private static CmsDateResourceComparator create ( CmsObject cms , CmsResource resource , List < String > dateIdentifiers ) { } } | CmsDateResourceComparator result = new CmsDateResourceComparator ( ) ; result . m_date = calculateDate ( cms , resource , dateIdentifiers , resource . getDateCreated ( ) ) ; return result ; |
public class HtmlDocletWriter { /** * Add method information .
* @ param method the method to be documented
* @ param dl the content tree to which the method information will be added */
private void addMethodInfo ( ExecutableElement method , Content dl ) { } } | TypeElement enclosing = utils . getEnclosingTypeElement ( method ) ; List < ? extends TypeMirror > intfacs = enclosing . getInterfaces ( ) ; ExecutableElement overriddenMethod = utils . overriddenMethod ( method ) ; // Check whether there is any implementation or overridden info to be
// printed . If no overridden or implementation info needs to be
// printed , do not print this section .
if ( ( ! intfacs . isEmpty ( ) && new ImplementedMethods ( method , this . configuration ) . build ( ) . isEmpty ( ) == false ) || overriddenMethod != null ) { MethodWriterImpl . addImplementsInfo ( this , method , dl ) ; if ( overriddenMethod != null ) { MethodWriterImpl . addOverridden ( this , utils . overriddenType ( method ) , overriddenMethod , dl ) ; } } |
public class WriteSinkFunction { /** * Implementation of the invoke method of the SinkFunction class . Collects
* the incoming tuples in tupleList and appends the list to the end of the
* target file if updateCondition ( ) is true or the current tuple is the
* endTuple . */
@ Override public void invoke ( IN tuple ) { } } | tupleList . add ( tuple ) ; if ( updateCondition ( ) ) { format . write ( path , tupleList ) ; resetParameters ( ) ; } |
public class FreemarkerFixture { /** * Applies template to current values .
* @ param aTemplate name of template file ( relative to ' testdata ' directory on classpath )
* @ return template result . */
public String applyTemplate ( String aTemplate ) { } } | String result = getEnvironment ( ) . processTemplate ( aTemplate , getCurrentValues ( ) ) ; result = postProcess ( result ) ; result = formatResult ( aTemplate , result ) ; return result ; |
public class SarlDocumentationParser { /** * Report an error .
* @ param message the message in a format compatible with { @ link MessageFormat } .
* @ param parameters the parameters , starting at { 1 } . */
protected static void reportError ( String message , Object ... parameters ) { } } | Throwable cause = null ; for ( int i = 0 ; cause == null && i < parameters . length ; ++ i ) { if ( parameters [ i ] instanceof Throwable ) { cause = ( Throwable ) parameters [ i ] ; } } final String msg = MessageFormat . format ( message , parameters ) ; if ( cause != null ) { throw new ParsingException ( msg , null , 1 , Throwables . getRootCause ( cause ) ) ; } throw new ParsingException ( msg , null , 1 ) ; |
public class JobClient { /** * Utility that submits a job , then polls for progress until the job is
* complete .
* @ param job the job configuration .
* @ throws IOException if the job fails */
public static RunningJob runJob ( JobConf job ) throws IOException { } } | JobClient jc = new JobClient ( job ) ; RunningJob rj = jc . submitJob ( job ) ; try { if ( ! jc . monitorAndPrintJob ( job , rj ) ) { throw new IOException ( "Job failed!" ) ; } } catch ( InterruptedException ie ) { Thread . currentThread ( ) . interrupt ( ) ; } return rj ; |
public class ParameterMap { /** * Add each parameter in the URL parameter map to the Parameter ' s parent .
* @ throws JspException if a JSP exception has occurred */
public int doStartTag ( ) throws JspException { } } | if ( hasErrors ( ) ) return reportAndExit ( SKIP_BODY ) ; Tag parentTag = findAncestorWithClass ( this , IUrlParams . class ) ; if ( parentTag != null ) { // this map shouldn ' t be null because the attribute is required .
assert ( _map != null ) ; IUrlParams parent = ( IUrlParams ) parentTag ; Iterator it = _map . entrySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { Map . Entry key = ( Map . Entry ) it . next ( ) ; parent . addParameter ( key . getKey ( ) . toString ( ) , key . getValue ( ) , null ) ; } } else { String msg = Bundle . getString ( "Tags_InvalidParameterMapParent" ) ; registerTagError ( msg , null ) ; reportErrors ( ) ; } localRelease ( ) ; return SKIP_BODY ; |
public class BlockedBuffer { /** * looking in blocks with indices from lo to ( hi - 1 ) inclusive */
final bbBlock findBlockHelper ( int pos , int lo , int hi ) { } } | bbBlock block ; int ii ; if ( ( hi - lo ) <= 3 ) { for ( ii = lo ; ii < hi ; ii ++ ) { block = this . _blocks . get ( ii ) ; if ( pos > block . _offset + block . _limit ) continue ; if ( block . containsForRead ( pos ) ) { return block ; } if ( block . _offset >= pos ) break ; } return this . _blocks . get ( ii - 1 ) ; // this will always be > 0
} int mid = ( hi + lo ) / 2 ; block = this . _blocks . get ( mid ) ; assert block != null ; if ( block . _offset > pos ) { return findBlockHelper ( pos , lo , mid ) ; } return findBlockHelper ( pos , mid , hi ) ; |
public class FormSubmitEvent {
    /**
     * Gets the type associated with this event, creating it lazily on first use.
     *
     * @return returns the handler type
     */
    public static Type<FormSubmitHandler<?>> getType() {
        // Lazy init via double-checked locking on FormSubmitHandler.class.
        // NOTE(review): double-checked locking is only safe if the 'type' field
        // is declared volatile — confirm the field declaration (not visible here).
        if (type == null) { // NOPMD
            synchronized (FormSubmitHandler.class) {
                if (type == null) {
                    type = new Type<>();
                }
            }
        }
        return type;
    }
}
public class JsonHash { /** * see { @ link Map # put ( Object , Object ) } .
* this method is alternative of { @ link # put ( String , Object , Type ) } call with { @ link Type # BOOLEAN } .
* @ param key
* @ param value
* @ return see { @ link Map # put ( Object , Object ) }
* @ since 1.4.12
* @ author vvakame */
public Object put ( String key , Boolean value ) { } } | if ( value == null ) { stateMap . put ( key , Type . NULL ) ; } else { stateMap . put ( key , Type . BOOLEAN ) ; } return super . put ( key , value ) ; |
public class ContextXmlReader {
    /**
     * Read the context tag and populate a JournalEntryContext object.
     * Consumes the context start tag, then reads the child elements in a fixed
     * order (password, no-op, now, then the attribute multi-maps), verifies the
     * matching end tag, and finally deciphers the stored password.
     */
    public JournalEntryContext readContext(XMLEventReader reader) throws JournalException, XMLStreamException {
        JournalEntryContext context = new JournalEntryContext();
        XMLEvent event = reader.nextTag();
        if (!isStartTagEvent(event, QNAME_TAG_CONTEXT)) {
            throw getNotStartTagException(QNAME_TAG_CONTEXT, event);
        }
        context.setPassword(readContextPassword(reader));
        context.setNoOp(readContextNoOp(reader));
        context.setNow(readContextNow(reader));
        // NOTE(review): subject attributes are the only map NOT passed through
        // convertStringMap — presumably its setter takes the raw multi-map type;
        // confirm against JournalEntryContext.
        context.setEnvironmentAttributes(convertStringMap(readMultiMap(reader, CONTEXT_MAPNAME_ENVIRONMENT)));
        context.setSubjectAttributes(readMultiMap(reader, CONTEXT_MAPNAME_SUBJECT));
        context.setActionAttributes(convertStringMap(readMultiMap(reader, CONTEXT_MAPNAME_ACTION)));
        context.setResourceAttributes(convertStringMap(readMultiMap(reader, CONTEXT_MAPNAME_RESOURCE)));
        context.setRecoveryAttributes(convertStringMap(readMultiMap(reader, CONTEXT_MAPNAME_RECOVERY)));
        event = reader.nextTag();
        if (!isEndTagEvent(event, QNAME_TAG_CONTEXT)) {
            throw getNotEndTagException(QNAME_TAG_CONTEXT, event);
        }
        decipherPassword(context);
        return context;
    }
}
public class AtomContent {
    /**
     * Returns a new instance of HTTP content for an Atom feed.
     *
     * @param namespaceDictionary XML namespace dictionary
     * @param feed data key/value pair for the Atom feed
     * @return new AtomContent wrapping the feed (the boolean flag marks it as a feed, not an entry)
     * @since 1.5
     */
    public static AtomContent forFeed(XmlNamespaceDictionary namespaceDictionary, Object feed) {
        return new AtomContent(namespaceDictionary, feed, false);
    }
}
public class BloodhoundOptions {
    /**
     * Set a datum tokenizer that uses pre-tokenized tokens (e.g. from remote) as
     * contained in the datum. It therefore uses the field
     * {@link BloodhoundDatum#JSON_TOKENS} of each datum.
     * Emits the JS function {@code function(d) { return d.<tokens-field>; }}.
     *
     * @return this
     */
    @Nonnull
    public BloodhoundOptions setDatumTokenizerPreTokenized() {
        // "d" is the datum parameter of the generated JS tokenizer function.
        final JSVar aVarDatum = new JSVar("d");
        return setDatumTokenizer(new JSAnonymousFunction(aVarDatum, new JSReturn(aVarDatum.ref(BloodhoundDatum.JSON_TOKENS))));
    }
}
public class Util { /** * Layout .
* @ param c the c */
public static void layout ( @ javax . annotation . Nonnull final Component c ) { } } | c . doLayout ( ) ; if ( c instanceof Container ) { Arrays . stream ( ( ( Container ) c ) . getComponents ( ) ) . forEach ( com . simiacryptus . util . Util :: layout ) ; } |
public class XNodeSet {
    /**
     * Cast result object to a nodelist.
     *
     * @return a NodeList view of this node-set.
     * @throws javax.xml.transform.TransformerException
     */
    public NodeList nodelist() throws javax.xml.transform.TransformerException {
        org.apache.xml.dtm.ref.DTMNodeList nodelist = new org.apache.xml.dtm.ref.DTMNodeList(this);
        // Creating a DTMNodeList has the side-effect that it will create a clone
        // XNodeSet with cache and run m_iter to the end. You cannot get any node
        // from m_iter after this call. As a fix, we adopt the clone's cached
        // vector into this object via SetVector(). See Bugzilla 14406.
        XNodeSet clone = (XNodeSet) nodelist.getDTMIterator();
        SetVector(clone.getVector());
        return nodelist;
    }
}
public class ObservableDataBuilder { /** * Each emission of this observable prepends to the elements of the data . */
@ NonNull public ObservableDataBuilder < T > prepends ( @ Nullable Observable < ? extends Collection < ? extends T > > prepends ) { } } | mPrepends = prepends ; return this ; |
public class ProbeManagerImpl { /** * Get an existing instance of a probe with the specified source class and
* key .
* @ param probedClass the probe source class
* @ param key the probe key
* @ return an existing probe or null */
public synchronized ProbeImpl getProbe ( Class < ? > probedClass , String key ) { } } | Map < String , ProbeImpl > classProbes = probesByKey . get ( probedClass ) ; if ( classProbes != null ) { return classProbes . get ( key ) ; } return null ; |
public class FNORGImpl {
    /**
     * Sets the nominal character increment, firing an EMF SET notification
     * with the old and new values when notification is required.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setNomCharInc(Integer newNomCharInc) {
        Integer oldNomCharInc = nomCharInc;
        nomCharInc = newNomCharInc;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FNORG__NOM_CHAR_INC, oldNomCharInc, nomCharInc));
    }
}
public class Util {
    /**
     * Returns a single item from the Iterable.
     * If there's none, returns null.
     * If there are more, throws an IllegalStateException.
     *
     * @param it the iterable to inspect
     * @return the single element, or null if empty
     * @throws IllegalStateException if the iterable yields more than one element
     */
    public static final <T> T getSingle(Iterable<T> it) {
        // Use a single iterator throughout: the original called it.iterator()
        // twice, which is wasteful and breaks one-shot Iterables whose
        // iterator() cannot be re-obtained.
        final Iterator<T> iterator = it.iterator();
        if (!iterator.hasNext())
            return null;
        T o = iterator.next();
        if (iterator.hasNext())
            throw new IllegalStateException("Found multiple items in iterator over " + o.getClass().getName());
        return o;
    }
}
public class Proxy { /** * Execute a redirected request
* @ param stringStatusCode
* @ param httpMethodProxyRequest
* @ param httpServletRequest
* @ param httpServletResponse
* @ throws Exception */
private void processRedirect ( String stringStatusCode , HttpMethod httpMethodProxyRequest , HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse ) throws Exception { } } | // Check if the proxy response is a redirect
// The following code is adapted from
// org . tigris . noodle . filters . CheckForRedirect
// Hooray for open source software
String stringLocation = httpMethodProxyRequest . getResponseHeader ( STRING_LOCATION_HEADER ) . getValue ( ) ; if ( stringLocation == null ) { throw new ServletException ( "Received status code: " + stringStatusCode + " but no " + STRING_LOCATION_HEADER + " header was found in the response" ) ; } // Modify the redirect to go to this proxy servlet rather than the proxied host
String stringMyHostName = httpServletRequest . getServerName ( ) ; if ( httpServletRequest . getServerPort ( ) != 80 ) { stringMyHostName += ":" + httpServletRequest . getServerPort ( ) ; } stringMyHostName += httpServletRequest . getContextPath ( ) ; httpServletResponse . sendRedirect ( stringLocation . replace ( getProxyHostAndPort ( ) + this . getProxyPath ( ) , stringMyHostName ) ) ; |
public class BigImage { /** * Destroy the image and release any native resources .
* Calls on a destroyed image have undefined results */
public void destroy ( ) throws SlickException { } } | for ( int tx = 0 ; tx < xcount ; tx ++ ) { for ( int ty = 0 ; ty < ycount ; ty ++ ) { Image image = images [ tx ] [ ty ] ; image . destroy ( ) ; } } |
public class NERFeatureFactory {
    /**
     * Rebuilds {@code genericAnnotationKeys}: a cache of which keys in the given
     * CoreLabel are generic annotations, so we don't have to do too many
     * instanceof checks later.
     */
    @SuppressWarnings({"unchecked", "SuspiciousMethodCalls"})
    private void makeGenericKeyCache(CoreLabel c) {
        genericAnnotationKeys = new HashSet<Class<? extends GenericAnnotation<?>>>();
        for (Class<?> key : c.keySet()) {
            // Only keys registered in CoreLabel.genericValues count as generic.
            if (CoreLabel.genericValues.containsKey(key)) {
                Class<? extends GenericAnnotation<?>> genKey = (Class<? extends GenericAnnotation<?>>) key;
                genericAnnotationKeys.add(genKey);
            }
        }
    }
}
public class TypeReference { /** * check if type is a number type ( either primitive or instance of Number )
* @ param type
* @ return
* @ throws ClassNotFoundException */
public boolean isNumber ( ) { } } | if ( type == Byte . TYPE || type == Short . TYPE || type == Integer . TYPE || type == Long . TYPE || type == Float . TYPE || type == Double . TYPE ) { return true ; } else { return isRawTypeSubOf ( Number . class ) ; } |
public class CouchDBObjectMapper { /** * Gets the object from json .
* @ param jsonObj
* the json obj
* @ param clazz
* the clazz
* @ param columns
* the columns
* @ return the object from json */
static Object getObjectFromJson ( JsonObject jsonObj , Class clazz , Set < Attribute > columns ) { } } | Object obj = KunderaCoreUtils . createNewInstance ( clazz ) ; for ( Attribute column : columns ) { JsonElement value = jsonObj . get ( ( ( AbstractAttribute ) column ) . getJPAColumnName ( ) ) ; setFieldValue ( obj , column , value ) ; } return obj ; |
public class TextParameter { /** * Splits a < i > key = value < / i > pair and returns an array with the separated < i > key < / i > and < i > value < / i > parts .
* If the parameter does not match this required pattern , then < code > null < / code > will be returned .
* @ param keyValuePair a { @ link String } with a < i > key < / i > prefix of length > 0 , a ' = ' in the middle and a
* < i > value < / i > suffix of length > 0
* @ return array with the separated < i > key < / i > and < i > value < / i > parts or < code > null < / code > . */
public static String [ ] splitKeyValuePair ( final String keyValuePair ) { } } | String [ ] split = keyValuePair . split ( TextKeyword . EQUALS ) ; if ( split . length != 2 || split [ 0 ] . length ( ) == 0 || split [ 1 ] . length ( ) == 0 ) return null ; return split ; |
public class Configurer {
    /**
     * Get a string in the xml tree. Delegates to the node-level accessor.
     *
     * @param defaultValue Value used if node does not exist.
     * @param attribute The attribute to get as string.
     * @param path The node path (child list)
     * @return The string value.
     * @throws LionEngineException If unable to read node.
     */
    public final String getStringDefault(String defaultValue, String attribute, String... path) {
        return getNodeStringDefault(defaultValue, attribute, path);
    }
}
public class ZMQ {
    /**
     * Stable/legacy context API: creates a context configured with the given
     * number of I/O threads.
     *
     * @param ioThreads number of I/O threads for the context; must be &gt;= 0
     * @return the new context
     */
    public static Ctx init(int ioThreads) {
        Utils.checkArgument(ioThreads >= 0, "I/O threads must not be negative");
        Ctx ctx = createContext();
        setContextOption(ctx, ZMQ_IO_THREADS, ioThreads);
        return ctx;
    }
}
public class DescribeExportTasksRequest {
    /**
     * The export task IDs. Lazily initializes the backing list on first access
     * so a non-null (possibly empty) list is always returned.
     *
     * @return The export task IDs.
     */
    public java.util.List<String> getExportTaskIds() {
        if (exportTaskIds == null) {
            exportTaskIds = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return exportTaskIds;
    }
}
public class RpcManager { /** * Load and init the { @ link HttpRpcPlugin } s provided as an array of
* { @ code pluginClassNames } .
* @ param mode is this TSD in read / write ( " rw " ) or read - only ( " ro " )
* mode ?
* @ param pluginClassNames fully - qualified class names that are
* instances of { @ link HttpRpcPlugin } s
* @ param http a map of canonicalized paths
* ( obtained via { @ link # canonicalizePluginPath ( String ) } )
* to { @ link HttpRpcPlugin } instance . */
@ VisibleForTesting protected void initializeHttpRpcPlugins ( final OperationMode mode , final String [ ] pluginClassNames , final ImmutableMap . Builder < String , HttpRpcPlugin > http ) { } } | for ( final String plugin : pluginClassNames ) { final HttpRpcPlugin rpc = createAndInitialize ( plugin , HttpRpcPlugin . class ) ; validateHttpRpcPluginPath ( rpc . getPath ( ) ) ; final String path = rpc . getPath ( ) . trim ( ) ; final String canonicalized_path = canonicalizePluginPath ( path ) ; http . put ( canonicalized_path , rpc ) ; LOG . info ( "Mounted HttpRpcPlugin [{}] at path \"{}\"" , rpc . getClass ( ) . getName ( ) , canonicalized_path ) ; } |
public class XMLCaster { /** * casts a value to a XML Comment Object
* @ param doc XML Document
* @ param o Object to cast
* @ return XML Comment Object
* @ throws PageException */
public static Comment toComment ( Document doc , Object o ) throws PageException { } } | if ( o instanceof Comment ) return ( Comment ) o ; else if ( o instanceof CharacterData ) return doc . createComment ( ( ( CharacterData ) o ) . getData ( ) ) ; return doc . createComment ( Caster . toString ( o ) ) ; |
public class KeyVaultClientBaseImpl {
    /**
     * Lists deleted storage accounts for the specified vault.
     * The Get Deleted Storage Accounts operation returns the storage accounts that have been deleted for a vault enabled for soft-delete. This operation requires the storage/list permission.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;DeletedStorageAccountItem&gt; object
     */
    public Observable<Page<DeletedStorageAccountItem>> getDeletedStorageAccountsAsync(final String vaultBaseUrl) {
        // Unwrap the ServiceResponse envelope, emitting only the page body.
        return getDeletedStorageAccountsWithServiceResponseAsync(vaultBaseUrl).map(new Func1<ServiceResponse<Page<DeletedStorageAccountItem>>, Page<DeletedStorageAccountItem>>() {
            @Override
            public Page<DeletedStorageAccountItem> call(ServiceResponse<Page<DeletedStorageAccountItem>> response) {
                return response.body();
            }
        });
    }
}
public class GetOrderRequest {
    /**
     * Read members from a MwsReader: the seller id as a single string and the
     * Amazon order ids as a list of "Id" elements.
     *
     * @param r The reader to read from.
     */
    @Override
    public void readFragmentFrom(MwsReader r) {
        sellerId = r.read("SellerId", String.class);
        amazonOrderId = r.readList("AmazonOrderId", "Id", String.class);
    }
}
public class DeploymentOperationsInner {
    /**
     * Gets all deployments operations for a deployment.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param deploymentName The name of the deployment with the operation to get.
     * @param top The number of results to return.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;DeploymentOperationInner&gt; object
     */
    public Observable<Page<DeploymentOperationInner>> listByResourceGroupAsync(final String resourceGroupName, final String deploymentName, final Integer top) {
        // Unwrap the ServiceResponse envelope, emitting only the page body.
        return listByResourceGroupWithServiceResponseAsync(resourceGroupName, deploymentName, top).map(new Func1<ServiceResponse<Page<DeploymentOperationInner>>, Page<DeploymentOperationInner>>() {
            @Override
            public Page<DeploymentOperationInner> call(ServiceResponse<Page<DeploymentOperationInner>> response) {
                return response.body();
            }
        });
    }
}
public class SignedDuoCookie { /** * Signs the given arbitrary string data with the given key using the
* algorithm defined by SIGNATURE _ ALGORITHM . Both the data and the key will
* be interpreted as UTF - 8 bytes .
* @ param key
* The key which should be used to sign the given data .
* @ param data
* The data being signed .
* @ return
* The signature produced by signing the given data with the given key ,
* encoded as lowercase hexadecimal .
* @ throws GuacamoleException
* If the given signing key is invalid . */
private static String sign ( String key , String data ) throws GuacamoleException { } } | try { // Attempt to sign UTF - 8 bytes of provided data
Mac mac = Mac . getInstance ( SIGNATURE_ALGORITHM ) ; mac . init ( new SecretKeySpec ( key . getBytes ( "UTF-8" ) , SIGNATURE_ALGORITHM ) ) ; // Return signature as hex
return BaseEncoding . base16 ( ) . lowerCase ( ) . encode ( mac . doFinal ( data . getBytes ( "UTF-8" ) ) ) ; } // Re - throw any errors which prevent signature
catch ( InvalidKeyException e ) { throw new GuacamoleServerException ( "Signing key is invalid." , e ) ; } // Throw hard errors if standard pieces of Java are missing
catch ( UnsupportedEncodingException e ) { throw new UnsupportedOperationException ( "Unexpected lack of UTF-8 support." , e ) ; } catch ( NoSuchAlgorithmException e ) { throw new UnsupportedOperationException ( "Unexpected lack of support " + "for required signature algorithm " + "\"" + SIGNATURE_ALGORITHM + "\"." , e ) ; } |
public class ListManagementImagesImpl {
    /**
     * Deletes an image from the list with list Id and image Id passed.
     * Blocks on the async service call and returns its body.
     *
     * @param listId List Id of the image list.
     * @param imageId Id of the image.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws APIErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the String object if successful.
     */
    public String deleteImage(String listId, String imageId) {
        return deleteImageWithServiceResponseAsync(listId, imageId).toBlocking().single().body();
    }
}
public class IOUtils { /** * Computes inverse document frequencies as a map of Strings to Doubles from a tab separated file of term document
* frequencies .
* @ param reader
* the input reader .
* @ param N
* the number of documents in the collection .
* @ return the inverse document frequencies .
* @ throws IOException */
public static Map < String , Double > readIdfs ( BufferedReader reader , int N ) throws IOException { } } | HashMap < String , Double > idfs = new HashMap < String , Double > ( ) ; String nextLine ; while ( ( ( nextLine = reader . readLine ( ) ) != null ) ) { String [ ] parts = nextLine . split ( "\\t+" ) ; checkArgument ( parts . length >= 2 ) ; Double df = Double . parseDouble ( parts [ 1 ] ) ; idfs . put ( parts [ 0 ] , Math . log ( 1.0 + N / df ) ) ; // idf
} return idfs ; |
public class Futures { /** * Returns a new { @ code ListenableFuture } whose result is asynchronously
* derived from the result of the given { @ code Future } . More precisely , the
* returned { @ code Future } takes its result from a { @ code Future } produced by
* applying the given { @ code AsyncFunction } to the result of the original
* { @ code Future } . Example :
* < pre > { @ code
* ListenableFuture < RowKey > rowKeyFuture = indexService . lookUp ( query ) ;
* AsyncFunction < RowKey , QueryResult > queryFunction =
* new AsyncFunction < RowKey , QueryResult > ( ) {
* public ListenableFuture < QueryResult > apply ( RowKey rowKey ) {
* return dataService . read ( rowKey ) ;
* ListenableFuture < QueryResult > queryFuture =
* transform ( rowKeyFuture , queryFunction ) ; } < / pre >
* < p > Note : If the derived { @ code Future } is slow or heavyweight to create
* ( whether the { @ code Future } itself is slow or heavyweight to complete is
* irrelevant ) , consider { @ linkplain # transform ( ListenableFuture ,
* AsyncFunction , Executor ) supplying an executor } . If you do not supply an
* executor , { @ code transform } will use a
* { @ linkplain MoreExecutors # directExecutor direct executor } , which carries
* some caveats for heavier operations . For example , the call to { @ code
* function . apply } may run on an unpredictable or undesirable thread :
* < ul >
* < li > If the input { @ code Future } is done at the time { @ code transform } is
* called , { @ code transform } will call { @ code function . apply } inline .
* < li > If the input { @ code Future } is not yet done , { @ code transform } will
* schedule { @ code function . apply } to be run by the thread that completes the
* input { @ code Future } , which may be an internal system thread such as an
* RPC network thread .
* < / ul >
* < p > Also note that , regardless of which thread executes the { @ code
* function . apply } , all other registered but unexecuted listeners are
* prevented from running during its execution , even if those listeners are
* to run in other executors .
* < p > The returned { @ code Future } attempts to keep its cancellation state in
* sync with that of the input future and that of the future returned by the
* function . That is , if the returned { @ code Future } is cancelled , it will
* attempt to cancel the other two , and if either of the other two is
* cancelled , the returned { @ code Future } will receive a callback in which it
* will attempt to cancel itself .
* @ param input The future to transform
* @ param function A function to transform the result of the input future
* to the result of the output future
* @ return A future that holds result of the function ( if the input succeeded )
* or the original input ' s failure ( if not )
* @ since 11.0 */
public static < I , O > ListenableFuture < O > transform ( ListenableFuture < I > input , AsyncFunction < ? super I , ? extends O > function ) { } } | ChainingListenableFuture < I , O > output = new ChainingListenableFuture < I , O > ( function , input ) ; input . addListener ( output , directExecutor ( ) ) ; return output ; |
public class DBInitializerHelper {
    /**
     * Returns {@link DatabaseStructureType} based on workspace configuration.
     * Tries the legacy boolean "multi-db" parameter first; if reading it fails,
     * falls back to the newer DB_STRUCTURE_TYPE parameter.
     */
    public static DatabaseStructureType getDatabaseType(WorkspaceEntry wsConfig) throws RepositoryConfigurationException {
        try {
            if (wsConfig.getContainer().getParameterBoolean("multi-db")) {
                return JDBCDataContainerConfig.DatabaseStructureType.MULTI;
            } else {
                return JDBCDataContainerConfig.DatabaseStructureType.SINGLE;
            }
        } catch (Exception e) {
            // NOTE(review): the absence of "multi-db" is detected via the thrown
            // exception — exception-as-control-flow; presumably getParameterBoolean
            // throws when the parameter is missing. Confirm before restructuring.
            String dbStructureType = wsConfig.getContainer().getParameterValue(JDBCWorkspaceDataContainer.DB_STRUCTURE_TYPE).toUpperCase();
            return JDBCDataContainerConfig.DatabaseStructureType.valueOf(dbStructureType);
        }
    }
}
public class DToA {
    /**
     * Multiplies {@code b} by 5^k.
     * XXXX the C version built a cache of these powers; this version computes
     * the power directly each call.
     *
     * @param b the multiplicand
     * @param k the exponent applied to 5 (non-negative)
     * @return b * 5^k
     */
    static BigInteger pow5mult(BigInteger b, int k) {
        final BigInteger powerOfFive = BigInteger.valueOf(5).pow(k);
        return powerOfFive.multiply(b);
    }
}
public class LoggingFormatter {
    /**
     * Format the given LogRecord as a single pipe-separated line:
     * timestamp | source class (or logger name) | source method | level | message,
     * followed by the stack trace when a throwable is attached.
     *
     * @param record the log record to be formatted.
     * @return a formatted log record
     */
    @Override
    public synchronized String format(LogRecord record) {
        StringBuilder sb = new StringBuilder();
        ZonedDateTime timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(record.getMillis()), ZoneId.systemDefault());
        sb.append(formatter.format(timestamp));
        sb.append(" | "); //$NON-NLS-1$
        // Prefer the simple source class name; fall back to the logger name.
        if (record.getSourceClassName() != null) {
            sb.append(truncate(record.getSourceClassName().substring(record.getSourceClassName().lastIndexOf('.') + 1), 30));
        } else {
            sb.append(truncate(record.getLoggerName(), 10));
        }
        sb.append(" | "); //$NON-NLS-1$
        if (record.getSourceMethodName() != null) {
            sb.append(truncate(record.getSourceMethodName(), 30));
        }
        sb.append(" | "); //$NON-NLS-1$
        String message = formatMessage(record);
        sb.append(truncate(record.getLevel().getLocalizedName(), MAX_LEVEL_SIZE));
        sb.append(" | "); //$NON-NLS-1$
        sb.append(message);
        sb.append(System.getProperty("line.separator")); //$NON-NLS-1$
        // Append the full stack trace when the record carries a throwable.
        if (record.getThrown() != null) {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            record.getThrown().printStackTrace(pw);
            pw.close();
            sb.append(sw.toString());
        }
        return sb.toString();
    }
}
public class LLIndexPage {
    /**
     * Locate the (first) page that could contain the given key.
     * In the inner pages, the keys are the minimum values of the sub-page. The
     * value is the according minimum value of the first key of the sub-page.
     * Descends recursively until a leaf page is reached.
     *
     * @param key the key to locate
     * @param value the value paired with the key for tie-breaking in the search
     * @param allowCreate whether missing child pages may be created during descent
     * @return Page for that key, or null if the tree is empty and creation is disallowed
     */
    public LLIndexPage locatePageForKey(long key, long value, boolean allowCreate) {
        if (isLeaf) {
            return this;
        }
        // nEntries == -1 marks an uninitialized page; nothing to find unless creating.
        if (nEntries == -1 && !allowCreate) {
            return null;
        }
        // The stored value[i] is the min-values of the according page[i+1]
        int pos = binarySearch(0, nEntries, key, value);
        if (pos >= 0) {
            // pos of matching key: descend into the page to its right
            pos++;
        } else {
            // no exact match: convert insertion point
            pos = -(pos + 1);
        }
        // TODO use weak refs
        // read page before that value
        LLIndexPage page = (LLIndexPage) readOrCreatePage(pos, allowCreate);
        return page.locatePageForKey(key, value, allowCreate);
    }
}
/**
 * Reference implementation of a universal update function for updatable
 * ResultSets. Currently unoptimized, due to possible compatibility issues.
 *
 * @param row row which should be updated
 * @param params values which would be used to update the current row
 * @throws SQLException if ResultSet navigation or the update fails
 */
private void updateResultSetRow(int row, QueryParameters params) throws SQLException {
    int currentRow = getCurrentResultSet().getRow();
    if (currentRow == 0) { // cursor is before the first row or after the last
        if (getCurrentResultSet().isAfterLast() == true) { // positioning on last
            getCurrentResultSet().last();
        } else if (getCurrentResultSet().isBeforeFirst() == true) { // position on first
            getCurrentResultSet().first();
        } else {
            // getRow() == 0 without being before-first/after-last: cannot recover.
            throw new MjdbcRuntimeException("ResultSet need to be repositioned to get row different than zero");
        }
        currentRow = getCurrentResultSet().getRow();
    }
    if (currentRow == row) {
        // Already on the target row: update in place.
        // NOTE(review): execution still falls through to the positioning logic
        // below, which can update the same row a second time — confirm whether
        // an early return was intended here.
        updateResultSetCurrentLine(getCurrentResultSet(), params);
    }
    if (useRelativePositioning == true) {
        // Jump straight to the target row via relative positioning.
        getCurrentResultSet().relative(row - currentRow);
        if (getCurrentResultSet().getRow() > 0) {
            updateResultSetCurrentLine(getCurrentResultSet(), params);
        }
    } else if (currentRow < row) {
        // Scan forward to the target row.
        // NOTE(review): the loop keeps iterating to the end of the ResultSet
        // even after the target row was updated — confirm this is intentional.
        while (getCurrentResultSet().next() == true) {
            if (getCurrentResultSet().getRow() == row) {
                updateResultSetCurrentLine(getCurrentResultSet(), params);
            }
        }
    } else {
        // Scan backward to the target row.
        while (getCurrentResultSet().previous() == true) {
            if (getCurrentResultSet().getRow() == row) {
                updateResultSetCurrentLine(getCurrentResultSet(), params);
            }
        }
    }
}
public class Streams { /** * Copy from an { @ link InputStream } to an { @ link OutputStream }
* without flushing OutputStream during copy */
public static void copy ( InputStream in , OutputStream out ) throws IOException { } } | copy ( in , out , false ) ; |
public class MultimapAlignment { /** * if it ' s in the map , it must be RightT */
@ SuppressWarnings ( "unchecked" ) @ Override public Set < LeftT > alignedToRightItem ( final Object rightItem ) { } } | if ( rightToLeft . containsKey ( rightItem ) ) { return rightToLeft . get ( ( RightT ) rightItem ) ; } else { return ImmutableSet . of ( ) ; } |
public class ReaderGroupStateManager { /** * Fetch the configured end offset for a configured segment . If end offset is not configured return Long . MAX _ VALUE .
* @ param segment Segment .
* @ return endOffset of the segment . */
long getEndOffsetForSegment ( Segment segment ) { } } | return Optional . ofNullable ( sync . getState ( ) . getEndSegments ( ) . get ( segment ) ) . orElse ( Long . MAX_VALUE ) ; |
public class KeyUtil { /** * 从KeyStore中获取私钥公钥
* @ param keyStore { @ link KeyStore }
* @ param password 密码
* @ param alias 别名
* @ return { @ link KeyPair }
* @ since 4.4.1 */
public static KeyPair getKeyPair ( KeyStore keyStore , char [ ] password , String alias ) { } } | PublicKey publicKey ; PrivateKey privateKey ; try { publicKey = keyStore . getCertificate ( alias ) . getPublicKey ( ) ; privateKey = ( PrivateKey ) keyStore . getKey ( alias , password ) ; } catch ( Exception e ) { throw new CryptoException ( e ) ; } return new KeyPair ( publicKey , privateKey ) ; |
public class ScriptWrapper { /** * Saves the latest contents into the configured file
* @ throws IOException
* @ since TODO add version */
void saveScript ( ) throws IOException { } } | // We ' ll always try to read it in with the default next time its loaded
this . charset = ExtensionScript . DEFAULT_CHARSET ; try ( BufferedWriter bw = Files . newBufferedWriter ( file . toPath ( ) , charset ) ) { bw . append ( getContents ( ) ) ; } this . lastModified = file . lastModified ( ) ; |
public class CmsObject { /** * Returns a list with all sub resources of a given folder that have set the given property ,
* matching the current property ' s value with the given old value and replacing it by a given new value . < p >
* @ param resourcename the name of the resource to change the property value
* @ param property the name of the property to change the value
* @ param oldValue the old value of the property , can be a regular expression
* @ param newValue the new value of the property
* @ param recursive if true , change recursively all property values on sub - resources ( only for folders )
* @ return a list with the < code > { @ link CmsResource } < / code > ' s where the property value has been changed
* @ throws CmsException if operation was not successful */
public List < CmsResource > changeResourcesInFolderWithProperty ( String resourcename , String property , String oldValue , String newValue , boolean recursive ) throws CmsException { } } | CmsResource resource = readResource ( resourcename , CmsResourceFilter . IGNORE_EXPIRATION ) ; return m_securityManager . changeResourcesInFolderWithProperty ( m_context , resource , property , oldValue , newValue , recursive ) ; |
public class HttpJsonSerializer { /** * Parses a tree ID and optional list of TSUIDs to search for collisions or
* not matched TSUIDs .
* @ return A map with " treeId " as an integer and optionally " tsuids " as a
* List < String >
* @ throws JSONException if parsing failed
* @ throws BadRequestException if the content was missing or parsing failed */
public Map < String , Object > parseTreeTSUIDsListV1 ( ) { } } | final String json = query . getContent ( ) ; if ( json == null || json . isEmpty ( ) ) { throw new BadRequestException ( HttpResponseStatus . BAD_REQUEST , "Missing message content" , "Supply valid JSON formatted data in the body of your request" ) ; } return JSON . parseToObject ( json , TR_HASH_MAP_OBJ ) ; |
public class ListValue { /** * Creates a { @ code ListValue } object given a number of { @ code LatLng } values . */
public static ListValue of ( LatLng first , LatLng ... other ) { } } | return newBuilder ( ) . addValue ( first , other ) . build ( ) ; |
public class S3ObjectWrapper { /** * Returns true if this S3 object has the encryption information stored
* as user meta data ; false otherwise . */
final boolean hasEncryptionInfo ( ) { } } | ObjectMetadata metadata = s3obj . getObjectMetadata ( ) ; Map < String , String > userMeta = metadata . getUserMetadata ( ) ; return userMeta != null && userMeta . containsKey ( Headers . CRYPTO_IV ) && ( userMeta . containsKey ( Headers . CRYPTO_KEY_V2 ) || userMeta . containsKey ( Headers . CRYPTO_KEY ) ) ; |
/**
 * Asynchronously cleans up application data when a SIP dialog terminates.
 * (non-Javadoc)
 * @see javax.sip.SipListener#processDialogTerminated(javax.sip.DialogTerminatedEvent)
 */
public void processDialogTerminated(final DialogTerminatedEvent dialogTerminatedEvent) {
    final Dialog dialog = dialogTerminatedEvent.getDialog();
    if (logger.isDebugEnabled()) {
        logger.debug("Dialog Terminated => dialog Id : " + dialogTerminatedEvent.getDialog().getDialogId());
    }
    getAsynchronousExecutor().execute(new Runnable() {
        // https://github.com/RestComm/sip-servlets/issues/107 guard against NPE on
        // concurrent cleanup: capture the app data eagerly, before run() executes.
        final TransactionApplicationData dialogAppData = (TransactionApplicationData) dialog.getApplicationData();
        public void run() {
            try {
                // NOTE(review): appDataFound is never set to true anywhere, so the
                // "no application data" debug line below logs even when app data
                // was found and cleaned — confirm whether that is intended.
                boolean appDataFound = false;
                TransactionApplicationData txAppData = null;
                if (dialogAppData != null) {
                    if (dialogAppData.getSipServletMessage() == null) {
                        // No servlet message: clean the transaction-level app data instead.
                        Transaction transaction = dialogAppData.getTransaction();
                        if (transaction != null && transaction.getApplicationData() != null) {
                            txAppData = (TransactionApplicationData) transaction.getApplicationData();
                            txAppData.cleanUp();
                        }
                    } else {
                        // A servlet message exists: try to invalidate its sip session.
                        MobicentsSipSessionKey sipSessionKey = dialogAppData.getSipSessionKey();
                        tryToInvalidateSession(sipSessionKey, false);
                    }
                    dialogAppData.cleanUp();
                    // since the stack doesn't nullify the app data, we need to do it to let go of the refs
                    dialog.setApplicationData(null);
                }
                if (!appDataFound && logger.isDebugEnabled()) {
                    logger.debug("no application data for this dialog " + dialog.getDialogId());
                }
            } catch (Exception e) {
                logger.error("Problem handling dialog termination", e);
            }
        }
    });
}
public class ProjDepTreeModule { /** * Returns true if the tensor contains zeros . */
public static boolean containsZeros ( Tensor tmFalseIn ) { } } | Algebra s = tmFalseIn . getAlgebra ( ) ; for ( int c = 0 ; c < tmFalseIn . size ( ) ; c ++ ) { if ( tmFalseIn . getValue ( c ) == s . zero ( ) ) { return true ; } } return false ; |
public class ReadSecondaryHandler { /** * This method is specifically for making sure a handle is moved to this field on valid .
* The field must be a ReferenceField .
* @ param iFieldSeq int On valid , move to this field . */
public MoveOnValidHandler addFieldSeqPair ( int iFieldSeq ) { } } | m_bMoveBehavior = true ; MoveOnValidHandler moveBehavior = new MoveOnValidHandler ( this . getOwner ( ) . getRecord ( ) . getField ( iFieldSeq ) ) ; m_record . addListener ( moveBehavior ) ; return moveBehavior ; |
public class BuddyListInitEventHandler { /** * / * ( non - Javadoc )
* @ see com . tvd12 . ezyfox . sfs2x . serverhandler . UserZoneEventHandler # handleServerEvent ( com . smartfoxserver . v2 . core . ISFSEvent ) */
@ Override public void handleServerEvent ( ISFSEvent event ) throws SFSException { } } | User sfsUser = ( User ) event . getParameter ( SFSEventParam . USER ) ; ApiUser apiUser = ( ApiUser ) sfsUser . getProperty ( APIKey . USER ) ; if ( apiUser . getBuddyProperties ( ) != null ) updateBuddyProperties ( sfsUser . getBuddyProperties ( ) , apiUser . getBuddyProperties ( ) ) ; removeAddedBuddies ( sfsUser ) ; super . handleServerEvent ( event ) ; |
/**
 * Mixes the three internal state words a, b and c: three rounds of
 * subtract/xor/shift that diffuse each word's bits into the others, in the
 * style of Jenkins' lookup hash mix.
 * NOTE(review): relies on the class's subtract/xor/leftShift helpers —
 * presumably these emulate the required modular arithmetic; confirm.
 */
private void hashMix() {
    // Round 1
    a = subtract(a, b); a = subtract(a, c); a = xor(a, c >> 13);
    b = subtract(b, c); b = subtract(b, a); b = xor(b, leftShift(a, 8));
    c = subtract(c, a); c = subtract(c, b); c = xor(c, (b >> 13));
    // Round 2
    a = subtract(a, b); a = subtract(a, c); a = xor(a, (c >> 12));
    b = subtract(b, c); b = subtract(b, a); b = xor(b, leftShift(a, 16));
    c = subtract(c, a); c = subtract(c, b); c = xor(c, (b >> 5));
    // Round 3
    a = subtract(a, b); a = subtract(a, c); a = xor(a, (c >> 3));
    b = subtract(b, c); b = subtract(b, a); b = xor(b, leftShift(a, 10));
    c = subtract(c, a); c = subtract(c, b); c = xor(c, (b >> 15));
}
public class DynamoDBMapper { /** * Queries an Amazon DynamoDB table and returns the matching results as an
* unmodifiable list of instantiated objects , using the default
* configuration .
* @ see DynamoDBMapper # query ( Class , DynamoDBQueryExpression ,
* DynamoDBMapperConfig ) */
public < T > PaginatedQueryList < T > query ( Class < T > clazz , DynamoDBQueryExpression queryExpression ) { } } | return query ( clazz , queryExpression , config ) ; |
public class TextUtils { /** * Utility method using a precompiled pattern instead of using the
* replaceAll method of the String class . This method will also be reusing
* Matcher objects .
* @ see java . util . regex . Pattern
* @ param pattern precompiled Pattern to match against
* @ param input the character sequence to check
* @ param replacement the String to substitute every match with
* @ return the String with all the matches substituted */
public static String replaceAll ( String pattern , CharSequence input , String replacement ) { } } | input = new InterruptibleCharSequence ( input ) ; Matcher m = getMatcher ( pattern , input ) ; String res = m . replaceAll ( replacement ) ; recycleMatcher ( m ) ; return res ; |
public class XMLContentHandler { /** * Creates the { @ link Node } in the repository from the given attributes .
* @ param parent
* inkstand . rules . config - the absolute path to your configuration file for the cluster node inkstand . rules . home
* - the absolute path to the working directory of the cluster node
* More on Jackrabbit configuration can be found on the Apache Jackrabbit project page . the parent node of
* the node to be created . If this is null , a root - level node will be created .
* @ param attributes
* the attributes containing the basic information required to create the node
* @ return the newly creates { @ link Node }
* @ throws RepositoryException
* if the new node can not be created */
private Node newNode ( final Node parent , final Attributes attributes ) throws RepositoryException { } } | Node parentNode ; if ( parent == null ) { parentNode = this . session . getRootNode ( ) ; } else { parentNode = parent ; } // TODO handle path parameters
final String name = attributes . getValue ( "name" ) ; final String primaryType = attributes . getValue ( "primaryType" ) ; LOG . info ( "Node {} adding child node {}(type={})" , parentNode . getPath ( ) , name , primaryType ) ; return parentNode . addNode ( name , primaryType ) ; |
public class DeleteItemSpec { /** * Convenient method to specify expressions ( and the associated name map and
* value map ) via { @ link DeleteItemExpressionSpec } . */
@ Beta public DeleteItemSpec withExpressionSpec ( DeleteItemExpressionSpec xspec ) { } } | return withConditionExpression ( xspec . getConditionExpression ( ) ) . withNameMap ( xspec . getNameMap ( ) ) . withValueMap ( xspec . getValueMap ( ) ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.