signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Intersectionf { /** * Test whether the one sphere with intersects the other sphere , and store the center of the circle of * intersection in the < code > ( x , y , z ) < / code > components of the supplied vector and the radius of that circle in the w component . * The normal vector of the circle of intersection can simply be obtained by subtracting the center of either sphere from the other . * Reference : < a href = " http : / / gamedev . stackexchange . com / questions / 75756 / sphere - sphere - intersection - and - circle - sphere - intersection " > http : / / gamedev . stackexchange . com < / a > * @ param sphereA * the first sphere * @ param sphereB * the second sphere * @ param centerAndRadiusOfIntersectionCircle * will hold the center of the circle of intersection in the < code > ( x , y , z ) < / code > components and the radius in the w component * @ return < code > true < / code > iff both spheres intersect ; < code > false < / code > otherwise */ public static boolean intersectSphereSphere ( Spheref sphereA , Spheref sphereB , Vector4f centerAndRadiusOfIntersectionCircle ) { } }
return intersectSphereSphere ( sphereA . x , sphereA . y , sphereA . z , sphereA . r * sphereA . r , sphereB . x , sphereB . y , sphereB . z , sphereB . r * sphereB . r , centerAndRadiusOfIntersectionCircle ) ;
public class SparseMisoSceneModel {
    /**
     * Informs the supplied visitor of each object in this scene.
     *
     * @param visitor the visitor to notify for each object.
     * @param interestingOnly if true, only the interesting objects will be visited.
     */
    public void visitObjects(ObjectVisitor visitor, boolean interestingOnly) {
        for (Iterator<Section> iter = getSections(); iter.hasNext();) {
            Section sect = iter.next();
            // Objects with a full ObjectInfo record are always visited.
            for (ObjectInfo oinfo : sect.objectInfo) {
                visitor.visit(oinfo);
            }
            // The remaining objects are stored as parallel primitive arrays
            // (tile id / x / y); materialize a transient ObjectInfo for each
            // only when the caller asked for the uninteresting ones too.
            if (!interestingOnly) {
                for (int oo = 0; oo < sect.objectTileIds.length; oo++) {
                    ObjectInfo info = new ObjectInfo();
                    info.tileId = sect.objectTileIds[oo];
                    info.x = sect.objectXs[oo];
                    info.y = sect.objectYs[oo];
                    visitor.visit(info);
                }
            }
        }
    }
}
public class MinioClient {
    /**
     * Executes HEAD method for given request parameters.
     *
     * @param bucketName Bucket name.
     * @param objectName Object name in the bucket.
     * @param headerMap Map of header parameters of the request.
     * @return the HTTP response (body already closed; only status/headers are meaningful).
     */
    private HttpResponse executeHead(String bucketName, String objectName, Map<String, String> headerMap)
            throws InvalidBucketNameException, NoSuchAlgorithmException, InsufficientDataException, IOException,
            InvalidKeyException, NoResponseException, XmlPullParserException, ErrorResponseException,
            InternalException {
        HttpResponse response = execute(Method.HEAD, getRegion(bucketName), bucketName, objectName,
                headerMap, null, null, 0);
        // HEAD responses carry no payload; close the body so the connection can be reused.
        response.body().close();
        return response;
    }
}
public class Time {
    /**
     * Generating Agency Specific Reports.
     *
     * @param company Company ID
     * @param agency Agency ID
     * @param params Parameters
     * @throws JSONException If error occurred
     * @return {@link JSONObject}
     */
    public JSONObject getByAgency(String company, String agency, HashMap<String, String> params) throws JSONException {
        // Delegate to the generic lookup; null project id restricts the report to the agency.
        return _getByType(company, null, agency, params, false);
    }
}
public class Bridge { /** * Convenience factory method to create a Bridge object from two call ids * @ param callId1 the call id * @ param callId2 the call id * @ return the Bridge * @ throws IOException unexpected error . */ public static Bridge create ( final String callId1 , final String callId2 ) throws Exception { } }
assert ( callId1 != null ) ; final BandwidthClient client = BandwidthClient . getInstance ( ) ; return create ( client , callId1 , callId2 ) ;
public class GanttDesignerReader {
    /**
     * Read remarks from a Gantt Designer file.
     *
     * @param gantt Gantt Designer file
     */
    private void processRemarks(Gantt gantt) {
        // Gantt Designer stores up to five separate remark blocks; process each
        // through the per-block overload in turn.
        processRemarks(gantt.getRemarks());
        processRemarks(gantt.getRemarks1());
        processRemarks(gantt.getRemarks2());
        processRemarks(gantt.getRemarks3());
        processRemarks(gantt.getRemarks4());
    }
}
public class AbstractAliasDestinationHandler {
    /**
     * Forwards a receive-allowed-RCD notification to the aliased target destination.
     *
     * (non-Javadoc)
     * @see com.ibm.ws.sib.processor.impl.interfaces.DestinationHandler#notifyReceiveAllowedRCD(com.ibm.ws.sib.processor.impl.interfaces.DestinationHandler)
     */
    @Override
    public void notifyReceiveAllowedRCD(DestinationHandler destinationHandler) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "notifyReceiveAllowedRCD", new Object[] { destinationHandler });
        // An alias has no state of its own here; simply delegate to the target.
        _targetDestinationHandler.notifyReceiveAllowedRCD(destinationHandler);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "notifyReceiveAllowedRCD");
    }
}
public class JSONWriter {
    /**
     * Writes a quoted, escaped JSON string value.
     *
     * @param value value to write; escaped via {@code escape} before emission.
     * @return this.
     * @throws IOException if the underlying writer fails.
     */
    public JSONWriter valueString(String value) throws IOException {
        beforeValue();
        writer.write(JSON.QUOTE);
        // Escape before writing so quotes/control characters inside the value
        // cannot break the surrounding JSON document.
        writer.write(escape(value));
        writer.write(JSON.QUOTE);
        return this;
    }
}
public class HtmlOutcomeTargetLink {
    /**
     * <p>Set the value of the <code>onkeydown</code> property.</p>
     */
    public void setOnkeydown(java.lang.String onkeydown) {
        // Store in the JSF state helper and mirror into the pass-through attribute map.
        getStateHelper().put(PropertyKeys.onkeydown, onkeydown);
        handleAttribute("onkeydown", onkeydown);
    }
}
public class ExpandableExtension {
    /**
     * Expands all expandable items.
     *
     * @param notifyItemChanged true if we need to call notifyItemChanged. DEFAULT: false
     */
    public void expand(boolean notifyItemChanged) {
        int length = mFastAdapter.getItemCount();
        // Iterate backwards: expanding an item inserts its children after it,
        // which would shift the indices of later items if we walked forwards.
        for (int i = length - 1; i >= 0; i--) {
            expand(i, notifyItemChanged);
        }
    }
}
public class Multimap {
    /**
     * Replaces all of the specified <code>oldValues</code> with the single specified
     * <code>newValue</code> under the given key. <code>false</code> is returned if no
     * <code>oldValue</code> is found.
     *
     * @param key the key whose values are examined
     * @param oldValues values to remove
     * @param newValue the single value added in their place
     * @return <code>true</code> if this Multimap is modified by this operation, otherwise <code>false</code>.
     */
    public boolean replaceAll(final K key, final Collection<?> oldValues, final E newValue) {
        final V val = valueMap.get(key);
        if (val == null) {
            return false;
        }
        // removeAll reports whether anything was actually removed; only then
        // do we add the replacement (one newValue regardless of removal count).
        if (val.removeAll(oldValues)) {
            val.add(newValue);
            return true;
        }
        return false;
    }
}
public class CommercePriceListAccountRelUtil {
    /**
     * Returns the first commerce price list account rel in the ordered set where uuid = &#63; and companyId = &#63;.
     *
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce price list account rel, or <code>null</code> if a matching
     *         commerce price list account rel could not be found
     */
    public static CommercePriceListAccountRel fetchByUuid_C_First(String uuid, long companyId,
            OrderByComparator<CommercePriceListAccountRel> orderByComparator) {
        // Thin static facade over the persistence layer.
        return getPersistence().fetchByUuid_C_First(uuid, companyId, orderByComparator);
    }
}
public class JsMainAdminServiceImpl {
    /**
     * {@inheritDoc}
     * Constructs the new config object based on the server.xml changes. There are a few rules while
     * constructing: 1) If the defaultQueue or defaultTopic is deleted it will not be taken into
     * consideration — old values are retained. 2) Filestore changes are not honoured, hence old
     * values will be considered.
     */
    @Override
    public void modified(ComponentContext context, Map<String, Object> properties, ConfigurationAdmin configAdmin) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.entry(tc, "modified", new Object[] { context, properties, configAdmin });
        }
        // Stash the new properties, then let internalModify apply the rules above.
        this.properties = properties;
        internalModify(configAdmin);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.exit(tc, "modified");
        }
    }
}
public class AbstractCache {
    /**
     * Checks if a value is present in the cache. If the disk cache is enabled, this will also
     * check whether the value has been persisted to disk.
     *
     * @param key the cache key
     * @return true if the value is cached in memory or on disk, false otherwise
     */
    @SuppressWarnings("unchecked")
    public synchronized boolean containsKey(Object key) {
        // Memory first (cheap); fall back to a file-existence probe only when
        // the disk cache is enabled. The unchecked cast mirrors the KeyT-typed API.
        return cache.containsKey(key) || (isDiskCacheEnabled && getFileForKey((KeyT) key).exists());
    }
}
public class ViewHandlerImpl {
    /**
     * Get the locales specified as acceptable by the original request, compare them to the
     * locales supported by this Application and return the best match.
     * Falls back to the application default locale, then the JVM default.
     */
    @Override
    public Locale calculateLocale(FacesContext facesContext) {
        Application application = facesContext.getApplication();
        // Request locales are ordered by client preference; take the first match.
        for (Iterator<Locale> requestLocales = facesContext.getExternalContext().getRequestLocales();
                requestLocales.hasNext();) {
            Locale requestLocale = requestLocales.next();
            for (Iterator<Locale> supportedLocales = application.getSupportedLocales();
                    supportedLocales.hasNext();) {
                Locale supportedLocale = supportedLocales.next();
                // higher priority to a language match over an exact match
                // that occurs further down (see JSTL Reference 1.0 8.3.1)
                if (requestLocale.getLanguage().equals(supportedLocale.getLanguage())
                        && (supportedLocale.getCountry() == null || supportedLocale.getCountry().length() == 0)) {
                    return supportedLocale;
                } else if (supportedLocale.equals(requestLocale)) {
                    return supportedLocale;
                }
            }
        }
        Locale defaultLocale = application.getDefaultLocale();
        return defaultLocale != null ? defaultLocale : Locale.getDefault();
    }
}
public class AbstractMinMaxTextBox {
    /**
     * Set minimum allowed value from its string representation.
     *
     * @param pmin minimum value allowed; an unparseable value clears the minimum.
     */
    public void setMin(final String pmin) {
        try {
            this.setMin(this.numberParser.parse(pmin));
        } catch (final ParseException e) {
            // Deliberate best-effort: a malformed minimum clears the constraint
            // rather than propagating a parse failure to the caller.
            this.setMin((T) null);
        }
    }
}
public class TableDef { /** * Returns the foreignkey to the specified table . * @ param name The name of the foreignkey * @ param tableName The name of the referenced table * @ return The foreignkey def or < code > null < / code > if it does not exist */ public ForeignkeyDef getForeignkey ( String name , String tableName ) { } }
String realName = ( name == null ? "" : name ) ; ForeignkeyDef def = null ; for ( Iterator it = getForeignkeys ( ) ; it . hasNext ( ) ; ) { def = ( ForeignkeyDef ) it . next ( ) ; if ( realName . equals ( def . getName ( ) ) && def . getTableName ( ) . equals ( tableName ) ) { return def ; } } return null ;
public class RangeStatisticImpl {
    /**
     * Synchronizable: counter is "updated" with the input value.
     * When the statistic is enabled, the time-weighted integral and water marks
     * are maintained alongside the raw counter.
     */
    final public void increment(long incVal) {
        if (enabled) {
            // synchronize the update of lastValue
            lastSampleTime = updateIntegral();
            synchronized (this) {
                current += incVal;
            }
            updateWaterMark();
        } else {
            // NOTE(review): the disabled path updates 'current' without the lock —
            // presumably acceptable because nothing reads it while disabled; confirm.
            current += incVal;
        }
    }
}
public class PrimaveraXERFileReader {
    /**
     * Reads each token from a single record and adds it to a list.
     * The list is cleared first, so it only ever holds the current record.
     *
     * @param tk tokenizer
     * @param record list of tokens
     * @throws IOException if the tokenizer fails to read
     */
    private void readRecord(Tokenizer tk, List<String> record) throws IOException {
        record.clear();
        // Accumulate word tokens until the tokenizer reports end-of-record/EOF.
        while (tk.nextToken() == Tokenizer.TT_WORD) {
            record.add(tk.getToken());
        }
    }
}
public class MetricAlarm {
    /**
     * The actions to execute when this alarm transitions to the <code>OK</code> state from any other
     * state. Each action is specified as an Amazon Resource Name (ARN).
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setOKActions(java.util.Collection)} or {@link #withOKActions(java.util.Collection)} if
     * you want to override the existing values.
     *
     * @param oKActions
     *        The actions to execute when this alarm transitions to the <code>OK</code> state from
     *        any other state. Each action is specified as an Amazon Resource Name (ARN).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public MetricAlarm withOKActions(String... oKActions) {
        // Lazily create the backing list sized to the varargs, then append.
        if (this.oKActions == null) {
            setOKActions(new com.amazonaws.internal.SdkInternalList<String>(oKActions.length));
        }
        for (String ele : oKActions) {
            this.oKActions.add(ele);
        }
        return this;
    }
}
public class SparkComputationGraph {
    /**
     * Score the examples individually, using a specified batch size. Unlike
     * {@link #calculateScore(JavaRDD, boolean)}, this method returns a score for each example
     * separately.<br>
     * Note: The provided JavaPairRDD has a key that is associated with each example and returned
     * score.<br>
     * <b>Note:</b> The DataSet objects passed in must have exactly one example in them (otherwise:
     * can't have a 1:1 association between keys and data sets to score).
     *
     * @param data Data to score
     * @param includeRegularizationTerms If true: include the l1/l2 regularization terms with the score (if any)
     * @param <K> Key type
     * @return A {@code JavaPairRDD<K, Double>} containing the scores of each example
     * @see MultiLayerNetwork#scoreExamples(DataSet, boolean)
     */
    public <K> JavaPairRDD<K, Double> scoreExamplesMultiDataSet(JavaPairRDD<K, MultiDataSet> data,
            boolean includeRegularizationTerms, int batchSize) {
        // Broadcast parameters and network config once so each partition can
        // rebuild the network locally instead of shipping it per record.
        return data.mapPartitionsToPair(new ScoreExamplesWithKeyFunction<K>(
                sc.broadcast(network.params()), sc.broadcast(conf.toJson()),
                includeRegularizationTerms, batchSize));
    }
}
public class ConfigureEipPropertiesStep { /** * Returns the mandatory String value of the given name * @ throws IllegalArgumentException if the value is not available in the given attribute map */ public static String mandatoryAttributeValue ( Map < Object , Object > attributeMap , String name ) { } }
Object value = attributeMap . get ( name ) ; if ( value != null ) { String text = value . toString ( ) ; if ( ! Strings . isBlank ( text ) ) { return text ; } } throw new IllegalArgumentException ( "The attribute value '" + name + "' did not get passed on from the previous wizard page" ) ;
public class XmlResponsesSaxParser {
    /**
     * Parses a ListVersions response XML document from an input stream.
     *
     * @param inputStream XML data input stream.
     * @param shouldSDKDecodeResponse whether the SDK should URL-decode key names in the response.
     * @return the XML handler object populated with data parsed from the XML stream.
     * @throws IOException if the stream cannot be read or sanitized.
     */
    public ListVersionsHandler parseListVersionsResponse(InputStream inputStream,
            final boolean shouldSDKDecodeResponse) throws IOException {
        ListVersionsHandler handler = new ListVersionsHandler(shouldSDKDecodeResponse);
        // Sanitize the document first (S3 responses may need cleanup before SAX parsing).
        parseXmlInputStream(handler, sanitizeXmlDocument(handler, inputStream));
        return handler;
    }
}
public class Sources {
    /**
     * Gets a source that joins the given collection of sources end to end.
     * When closed, the returned source will ensure the wrapped sources are all closed.
     *
     * @param collection the collection of sources to join.
     * @param <T> the type.
     * @return the joined source.
     */
    public static <T> Source<T> join(Collection<Source<T>> collection) {
        // Delegate to the Iterator-based overload.
        return join(collection.iterator());
    }
}
public class DITypeInfo { /** * Retrieves the character ( s ) prefixing a literal of this type . < p > * @ return the character ( s ) prefixing a literal of this type . */ String getLiteralPrefix ( ) { } }
switch ( type ) { case Types . SQL_BINARY : case Types . SQL_BLOB : case Types . SQL_CHAR : case Types . SQL_NCHAR : case Types . SQL_CLOB : case Types . SQL_VARBINARY : case Types . SQL_VARCHAR : case Types . SQL_NVARCHAR : return "'" ; case Types . SQL_DATALINK : return "'" ; // hypothetically : " { url ' " ; case Types . SQL_DATE : return "'" ; // or JDBC escape : " { d ' " ; case Types . OTHER : return "'" ; // hypothetically : " { o ' " ; or new " pkg . cls " ( . . . ) case Types . SQL_TIME_WITH_TIME_ZONE : case Types . SQL_TIME : return "'" ; // or JDBC escape : " { t ' " ; case Types . SQL_TIMESTAMP_WITH_TIME_ZONE : case Types . SQL_TIMESTAMP : return "'" ; // or JDBC escape : " { ts ' " ; case Types . SQL_XML : return "'" ; // hypothetically : " { xml ' " ; default : return null ; }
public class CFFFontSubset {
    /**
     * Function calculates the number of ranges in the Charset.
     *
     * @param NumofGlyphs The number of glyphs in the font
     * @param Type The format of the Charset (1 = byte nLeft, otherwise 16-bit nLeft)
     * @return The number of ranges in the Charset data structure
     */
    int CountRange(int NumofGlyphs, int Type) {
        int num = 0;
        // Sid is never used, but getCard16() must still be called: it advances
        // the read cursor past the range's SID field.
        @SuppressWarnings("unused")
        char Sid;
        int i = 1, nLeft;
        while (i < NumofGlyphs) {
            num++;
            Sid = getCard16();
            // Format 1 stores nLeft as a single byte; other formats use 16 bits.
            if (Type == 1)
                nLeft = getCard8();
            else
                nLeft = getCard16();
            // Each range covers the first glyph plus nLeft following glyphs.
            i += nLeft + 1;
        }
        return num;
    }
}
public class PostProcessorCssImageUrlRewriter {
    /**
     * Rewrites the image URL.
     *
     * @param url the image URL
     * @param binaryServletPath the binary servlet path
     * @param newCssPath the new Css path
     * @param binaryRsHandler the image resource handler
     * @return the rewritten image URL, made relative to the new CSS location
     * @throws IOException if an IOException occurs
     */
    protected String rewriteURL(String url, String binaryServletPath, String newCssPath,
            BinaryResourcesHandler binaryRsHandler) throws IOException {
        String imgUrl = url;
        if (isBinaryResource(imgUrl)) {
            imgUrl = addCacheBuster(url, binaryRsHandler);
            // Add image servlet path in the URL, if it's defined
            if (StringUtils.isNotEmpty(binaryServletPath)) {
                imgUrl = binaryServletPath + JawrConstant.URL_SEPARATOR + imgUrl;
            }
        }
        imgUrl = PathNormalizer.asPath(imgUrl);
        // Return the URL relative to the directory containing the rewritten CSS file.
        return PathNormalizer.getRelativeWebPath(PathNormalizer.getParentPath(newCssPath), imgUrl);
    }
}
public class MethodWriterImpl {
    /**
     * {@inheritDoc}
     * Builds the navigation-bar "Method" summary link: a hyperlink to the method summary
     * section (or the inherited-methods section when a class is given), or plain text
     * when no link is requested.
     */
    protected Content getNavSummaryLink(ClassDoc cd, boolean link) {
        if (link) {
            // cd == null: link to this class's own method summary;
            // otherwise link into the inherited-methods section for cd.
            if (cd == null) {
                return writer.getHyperLink(
                        SectionName.METHOD_SUMMARY,
                        writer.getResource("doclet.navMethod"));
            } else {
                return writer.getHyperLink(
                        SectionName.METHODS_INHERITANCE,
                        configuration.getClassName(cd),
                        writer.getResource("doclet.navMethod"));
            }
        } else {
            return writer.getResource("doclet.navMethod");
        }
    }
}
public class Matrix4f {
    /**
     * Set all the values within this matrix to <code>0</code>.
     *
     * @return a matrix holding the result
     */
    public Matrix4f zero() {
        // thisOrNew() returns this matrix or a thread-local temporary,
        // depending on whether this instance is immutable.
        Matrix4f dest = thisOrNew();
        MemUtil.INSTANCE.zero(dest);
        // No structural properties (identity/translation/...) hold for the zero matrix.
        _properties(0);
        return dest;
    }
}
public class MalisisGui {
    /**
     * Called when the mouse is moved while a button is pressed.
     * Forwards the drag to the currently dragged component, shielding the GUI
     * from exceptions thrown by component code.
     */
    protected void mouseDragged(int button) {
        try {
            if (draggedComponent != null)
                draggedComponent.onDrag(MouseButton.getButton(button));
        } catch (Exception e) {
            // Surface the error to the player in chat...
            MalisisCore.message("A problem occured : " + e.getClass().getSimpleName() + ": " + e.getMessage());
            // ...and dump the trace to raw stdout (FileDescriptor.out bypasses any
            // System.out redirection the game may have installed).
            e.printStackTrace(new PrintStream(new FileOutputStream(FileDescriptor.out)));
        }
    }
}
public class ApiError {
    /**
     * Gets the fieldPathElements value for this ApiError.
     *
     * @return fieldPathElements A parsed copy of the field path. For example, the field path
     *         "operations[1].operand" corresponds to this list:
     *         {FieldPathElement(field = "operations", index = 1),
     *         FieldPathElement(field = "operand", index = null)}.
     */
    public com.google.api.ads.admanager.axis.v201811.FieldPathElement[] getFieldPathElements() {
        // Plain accessor; returns the internal array reference (no defensive copy).
        return fieldPathElements;
    }
}
public class VmServerMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param vmServer the object to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving each bound field
     * @throws SdkClientException if vmServer is null or marshalling fails
     */
    public void marshall(VmServer vmServer, ProtocolMarshaller protocolMarshaller) {
        if (vmServer == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field against its static binding descriptor.
            protocolMarshaller.marshall(vmServer.getVmServerAddress(), VMSERVERADDRESS_BINDING);
            protocolMarshaller.marshall(vmServer.getVmName(), VMNAME_BINDING);
            protocolMarshaller.marshall(vmServer.getVmManagerName(), VMMANAGERNAME_BINDING);
            protocolMarshaller.marshall(vmServer.getVmManagerType(), VMMANAGERTYPE_BINDING);
            protocolMarshaller.marshall(vmServer.getVmPath(), VMPATH_BINDING);
        } catch (Exception e) {
            // Wrap with the cause preserved so callers see a single SDK exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SQLiteDatabase {
    /**
     * This method disables the features enabled by {@link #enableWriteAheadLogging()}.
     *
     * @throws IllegalStateException if there are transactions in progress at the time this
     *         method is called. WAL mode can only be changed when there are no transactions
     *         in progress.
     * @see #enableWriteAheadLogging
     */
    public void disableWriteAheadLogging() {
        synchronized (mLock) {
            throwIfNotOpenLocked();
            // Already disabled: nothing to do.
            if ((mConfigurationLocked.openFlags & ENABLE_WRITE_AHEAD_LOGGING) == 0) {
                return;
            }
            mConfigurationLocked.openFlags &= ~ENABLE_WRITE_AHEAD_LOGGING;
            try {
                mConnectionPoolLocked.reconfigure(mConfigurationLocked);
            } catch (RuntimeException ex) {
                // Reconfiguration failed: restore the flag so the cached config
                // stays consistent with the pool's actual state.
                mConfigurationLocked.openFlags |= ENABLE_WRITE_AHEAD_LOGGING;
                throw ex;
            }
        }
    }
}
public class JnlpFileHandler {
    /**
     * Main method to look up an entry: returns the (possibly cached) download response
     * for a JNLP file, re-reading and re-specializing the template when the underlying
     * resource has changed.
     */
    public synchronized DownloadResponse getJnlpFile(JnlpResource jnlpres, DownloadRequest dreq) throws IOException {
        String path = jnlpres.getPath();
        URL resource = jnlpres.getResource();
        long lastModified = jnlpres.getLastModified();
        _log.addDebug("lastModified: " + lastModified + " " + new Date(lastModified));
        if (lastModified == 0) {
            _log.addWarning("servlet.log.warning.nolastmodified", path);
        }
        // fix for 4474854: use the request URL as key to look up jnlp file in hash map
        String reqUrl = HttpUtils.getRequestURL(dreq.getHttpRequest()).toString();
        // Check if entry already exists in HashMap
        JnlpFileEntry jnlpFile = (JnlpFileEntry) _jnlpFiles.get(reqUrl);
        if (jnlpFile != null && jnlpFile.getLastModified() == lastModified) {
            // Entry found in cache, so return it
            return jnlpFile.getResponse();
        }
        // Read information from WAR file
        long timeStamp = lastModified;
        String mimeType = _servletContext.getMimeType(path);
        if (mimeType == null) {
            mimeType = JNLP_MIME_TYPE;
        }
        StringBuilder jnlpFileTemplate = new StringBuilder();
        URLConnection conn = resource.openConnection();
        BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"));
        String line = br.readLine();
        // An optional first line of the form "TS:<stamp>" carries an explicit timestamp.
        if (line != null && line.startsWith("TS:")) {
            timeStamp = parseTimeStamp(line.substring(3));
            _log.addDebug("Timestamp: " + timeStamp + " " + new Date(timeStamp));
            if (timeStamp == 0) {
                _log.addWarning("servlet.log.warning.notimestamp", path);
                timeStamp = lastModified;
            }
            line = br.readLine();
        }
        // Slurp the rest of the template (note: line terminators are not preserved).
        while (line != null) {
            jnlpFileTemplate.append(line);
            line = br.readLine();
        }
        String jnlpFileContent = specializeJnlpTemplate(dreq.getHttpRequest(), path, jnlpFileTemplate.toString());
        // Convert to bytes as a UTF-8 encoding
        byte[] byteContent = jnlpFileContent.getBytes("UTF-8");
        // Create entry and cache it keyed by the request URL
        DownloadResponse resp = DownloadResponse.getFileDownloadResponse(
                byteContent, mimeType, timeStamp, jnlpres.getReturnVersionId());
        jnlpFile = new JnlpFileEntry(resp, lastModified);
        _jnlpFiles.put(reqUrl, jnlpFile);
        return resp;
    }
}
public class LookupService {
    /**
     * Returns the long version of an IP address given its raw bytes.
     *
     * @param address the four address bytes, most significant first.
     * @return the long form of the IPv4 address.
     */
    private static long bytesToLong(byte[] address) {
        long ipnum = 0;
        for (int octet = 0; octet < 4; octet++) {
            // Mask to treat the (signed) byte as an unsigned octet, then place
            // it at its big-endian position.
            long unsigned = address[octet] & 0xFF;
            ipnum += unsigned << ((3 - octet) * 8);
        }
        return ipnum;
    }
}
public class CmsSqlConsoleResults {
    /**
     * Gets the type to use for the Vaadin table column corresponding to the c-th column in this result.
     * Scans rows until a non-null value is found; an all-null column defaults to Object.
     *
     * @param c the column index
     * @return the class to use for the c-th Vaadin table column
     */
    public Class<?> getColumnType(int c) {
        for (int r = 0; r < m_data.size(); r++) {
            Object val = m_data.get(r).get(c);
            if (val != null) {
                return val.getClass();
            }
        }
        return Object.class;
    }
}
public class CmsFormatterBeanParser {
    /**
     * Parses formatter attributes.
     *
     * @param formatterLoc the node location
     * @return the map of formatter attributes (unmodifiable, insertion-ordered)
     */
    private Map<String, String> parseAttributes(I_CmsXmlContentLocation formatterLoc) {
        // LinkedHashMap keeps attributes in document order.
        Map<String, String> result = new LinkedHashMap<>();
        for (I_CmsXmlContentValueLocation mappingLoc : formatterLoc.getSubValues(N_ATTRIBUTE)) {
            String key = CmsConfigurationReader.getString(m_cms, mappingLoc.getSubValue(N_KEY));
            String value = CmsConfigurationReader.getString(m_cms, mappingLoc.getSubValue(N_VALUE));
            result.put(key, value);
        }
        return Collections.unmodifiableMap(result);
    }
}
public class ExpressionList { /** * Add this instruction to the end of the list . * @ param _ instruction * @ return The instruction we added */ public Instruction add ( Instruction _instruction ) { } }
if ( head == null ) { head = _instruction ; } else { _instruction . setPrevExpr ( tail ) ; tail . setNextExpr ( _instruction ) ; } tail = _instruction ; logger . log ( Level . FINE , "After PUSH of " + _instruction + " tail=" + tail ) ; return ( tail ) ;
public class RegionAddressId {
    /**
     * Returns a region address identity given the region identity and the address name. The address
     * name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be
     * 1-63 characters long and match the regular expression {@code [a-z]([-a-z0-9]*[a-z0-9])?} which
     * means the first character must be a lowercase letter, and all following characters must be a
     * dash, lowercase letter, or digit, except the last character, which cannot be a dash.
     *
     * @see <a href="https://www.ietf.org/rfc/rfc1035.txt">RFC1035</a>
     */
    public static RegionAddressId of(RegionId regionId, String address) {
        // Flatten the RegionId into its project/region components.
        return new RegionAddressId(regionId.getProject(), regionId.getRegion(), address);
    }
}
public class Stage {
    /**
     * Answers the T protocol proxy for this newly created Actor. (INTERNAL ONLY)
     *
     * @param <T> the protocol type
     * @param protocol the {@code Class<T>} protocol of the Actor
     * @param actor the Actor instance that backs the proxy protocol
     * @param mailbox the Mailbox instance of this Actor
     * @return T
     */
    final <T> T actorProxyFor(final Class<T> protocol, final Actor actor, final Mailbox mailbox) {
        // Proxy creation is fully delegated to the ActorProxy factory.
        return ActorProxy.createFor(protocol, actor, mailbox);
    }
}
public class NioServer {
    /**
     * Initialize the server: open a non-blocking server socket channel bound to the
     * given address and register it with a selector for accept events.
     *
     * @param address the address and port to bind
     * @return this
     */
    public NioServer init(InetSocketAddress address) {
        try {
            // Open the server socket channel
            this.serverSocketChannel = ServerSocketChannel.open();
            // Switch to non-blocking mode (required for selector use)
            serverSocketChannel.configureBlocking(false);
            // Obtain the socket associated with the channel
            final ServerSocket serverSocket = serverSocketChannel.socket();
            // Bind to the requested address/port
            serverSocket.bind(address);
            // Open a selector
            selector = Selector.open();
            // Register the server channel with the selector, listening for ACCEPT events
            serverSocketChannel.register(selector, SelectionKey.OP_ACCEPT);
        } catch (IOException e) {
            // Wrap checked IOException into the project's unchecked variant.
            throw new IORuntimeException(e);
        }
        return this;
    }
}
public class Governator {
    /**
     * Add runtime profiles. Profiles are processed by the conditional binding
     * {@literal @}ConditionalOnProfile and are injectable as
     * {@literal @}Profiles Set{@literal <}String{@literal >}.
     *
     * @param profiles Set of profiles; a null varargs array is tolerated and ignored.
     * @return this
     */
    public Governator addProfiles(String... profiles) {
        if (profiles != null) {
            this.profiles.addAll(Arrays.asList(profiles));
        }
        return this;
    }
}
public class EntityInfo {
    /**
     * Determine whether a message at the given level should be logged: the logger
     * must accept the level AND the level must meet this entity's own threshold.
     *
     * @param logger Logger
     * @param l Level
     * @return boolean true when both the logger and this entity's logLevel allow it
     */
    public boolean isLoggable(Logger logger, Level l) {
        return logger.isLoggable(l) && l.intValue() >= this.logLevel;
    }
}
public class DepthSparse3D {
    /**
     * Configures intrinsic camera parameters.
     *
     * @param model Model for narrow FOV cameras
     * @param visualToDepth Transform from visual to depth camera pixel coordinate systems.
     */
    public void configure(LensDistortionNarrowFOV model, PixelTransform<Point2D_F32> visualToDepth) {
        this.visualToDepth = visualToDepth;
        // Cache the pixel -> normalized-image-coordinate transform
        // (undistort_F64(true, false): pixel input, normalized output).
        this.p2n = model.undistort_F64(true, false);
    }
}
public class DescribeTaskSetsResult {
    /**
     * The list of task sets described.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTaskSets(java.util.Collection)} or {@link #withTaskSets(java.util.Collection)} if
     * you want to override the existing values.
     *
     * @param taskSets The list of task sets described.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTaskSetsResult withTaskSets(TaskSet... taskSets) {
        // Lazily create the backing list sized to the varargs, then append.
        if (this.taskSets == null) {
            setTaskSets(new com.amazonaws.internal.SdkInternalList<TaskSet>(taskSets.length));
        }
        for (TaskSet ele : taskSets) {
            this.taskSets.add(ele);
        }
        return this;
    }
}
public class Hash {
    /**
     * Merge two Hashes into one for Merkle Tree calculation.
     *
     * @param a - a Hash
     * @param b - another Hash
     * @return SHA256(SHA256(a || b))
     */
    public static Hash merge(Hash a, Hash b) {
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            // Subtle but correct: update(a) buffers a's bytes, so the inner
            // digest(b.bytes) completes SHA256(a || b) and resets the digest;
            // the outer digest(...) then computes SHA256 of that inner hash.
            digest.update(a.bytes);
            return Hash.createFromSafeArray(digest.digest(digest.digest(b.bytes)));
        } catch (NoSuchAlgorithmException e) {
            // SHA-256 is mandated by the JDK; treat absence as fatal.
            throw new RuntimeException(e);
        }
    }
}
public class ConfigurableMavenWorkingSessionImpl {
    /**
     * Generates an instance of the {@link DefaultRepositorySystemSession} and takes into account
     * related properties (legacy local repository mode, optional disabling of the class-path
     * workspace reader).
     */
    private void generateSession() {
        this.session = this.system.getSession(getSettings(), this.useLegacyLocalRepository);
        if (this.disableClassPathWorkspaceReader) {
            // Drop the workspace reader so artifacts are resolved from repositories
            // instead of the surrounding build's class path.
            ((DefaultRepositorySystemSession) this.session).setWorkspaceReader(null);
        }
    }
}
public class Index {
    /**
     * Delete all synonym sets of this index.
     *
     * @param forwardToReplicas Forward the operation to the replica indices
     * @param requestOptions Options to pass to this request
     * @return the JSON response of the clear operation
     * @throws AlgoliaException on API error
     */
    public JSONObject clearSynonyms(boolean forwardToReplicas, RequestOptions requestOptions) throws AlgoliaException {
        // POST with an empty body; forwardToReplicas is passed as a query parameter.
        return client.postRequest(
                "/1/indexes/" + encodedIndexName + "/synonyms/clear?forwardToReplicas=" + forwardToReplicas,
                "", true, false, requestOptions);
    }
}
public class HanLP {
    /**
     * Create a segmenter (factory method).
     *
     * @param algorithm the segmentation algorithm; both Chinese and English names are accepted:
     *                  <ul>
     *                  <li>viterbi — best balance of speed and accuracy</li>
     *                  <li>dat (double-array trie) — extremely fast dictionary segmentation</li>
     *                  <li>crf (conditional random field) — high accuracy segmentation, POS tagging
     *                  and NER; suited to demanding NLP tasks</li>
     *                  <li>perceptron — segmentation, POS tagging and NER with online learning</li>
     *                  <li>nshort (N-shortest path) — slightly better NER at the cost of speed</li>
     *                  </ul>
     * @return a segmenter
     */
    public static Segment newSegment(String algorithm) {
        if (algorithm == null) {
            throw new IllegalArgumentException(String.format("非法参数 algorithm == %s", algorithm));
        }
        // Normalize so the English aliases are case-insensitive.
        algorithm = algorithm.toLowerCase();
        if ("viterbi".equals(algorithm) || "维特比".equals(algorithm))
            // Viterbi segmenter: currently the best balance of speed and accuracy
            return new ViterbiSegment();
        else if ("dat".equals(algorithm) || "双数组trie树".equals(algorithm))
            return new DoubleArrayTrieSegment();
        else if ("nshort".equals(algorithm) || "n最短路".equals(algorithm))
            return new NShortSegment();
        else if ("crf".equals(algorithm) || "条件随机场".equals(algorithm))
            try {
                return new CRFLexicalAnalyzer();
            } catch (IOException e) {
                // Model failed to load from disk; surface as unchecked with cause preserved.
                logger.warning("CRF模型加载失败");
                throw new RuntimeException(e);
            }
        else if ("perceptron".equals(algorithm) || "感知机".equals(algorithm)) {
            try {
                return new PerceptronLexicalAnalyzer();
            } catch (IOException e) {
                // Model failed to load from disk; surface as unchecked with cause preserved.
                logger.warning("感知机模型加载失败");
                throw new RuntimeException(e);
            }
        }
        throw new IllegalArgumentException(String.format("非法参数 algorithm == %s", algorithm));
    }
}
public class HandlerConfigLoader { /** * Get properties for a specific config , for a handler which maintains properties grouped by configNames * Defaults may also be provided in a special default configName , defaults provide base values which may be overridden * by those set at configName level . * myHandler . config1 . property1 = val * myHandler . config1 . property2 = val * myHandler . config2 . property1 = val * myHandler . config2 . property2 = val * myHandler . default . property1 = val */ public Properties loadPropertiesForSubGroup ( ConfigurationManager configurationManager , String handlerPrefix , String groupName ) { } }
PropertyOperations handlerProps = properties ( loadProperties ( configurationManager , handlerPrefix ) ) ; PropertyOperations defaultProps = handlerProps . filterByAndRemoveKeyPrefix ( ChorusConstants . DEFAULT_PROPERTIES_GROUP + "." ) ; PropertyOperations configProps = handlerProps . filterByAndRemoveKeyPrefix ( groupName + "." ) ; PropertyOperations merged = defaultProps . merge ( configProps ) ; return merged . loadProperties ( ) ;
public class SimpleReadWriteLock {
    /**
     * Executes the provided supplier while holding the write lock.
     * No nullable/non-nullable guarantee can be assumed for the result.
     *
     * @param aSupplier supplier to be executed; may not be <code>null</code>
     * @param <T> return type
     * @return the supplier's result; may be <code>null</code>
     */
    public <T> T writeLocked(@Nonnull final Supplier<? extends T> aSupplier) {
        writeLock().lock();
        try {
            return aSupplier.get();
        } finally {
            // always release, even if the supplier throws
            writeLock().unlock();
        }
    }
}
public class SavepointV2Serializer {
    /**
     * Writes the checkpoint metadata in the V2 savepoint wire format:
     * checkpoint id, then master states, then per-operator states.
     * The write order here must stay in sync with the corresponding deserializer.
     */
    @Override
    public void serialize(SavepointV2 checkpointMetadata, DataOutputStream dos) throws IOException {
        // first: checkpoint ID
        dos.writeLong(checkpointMetadata.getCheckpointId());
        // second: master state, length-prefixed
        final Collection<MasterState> masterStates = checkpointMetadata.getMasterStates();
        dos.writeInt(masterStates.size());
        for (MasterState ms : masterStates) {
            serializeMasterState(ms, dos);
        }
        // third: operator states, length-prefixed
        Collection<OperatorState> operatorStates = checkpointMetadata.getOperatorStates();
        dos.writeInt(operatorStates.size());
        for (OperatorState operatorState : operatorStates) {
            // Operator ID (128 bits written as two longs)
            dos.writeLong(operatorState.getOperatorID().getLowerPart());
            dos.writeLong(operatorState.getOperatorID().getUpperPart());
            // Parallelism
            int parallelism = operatorState.getParallelism();
            dos.writeInt(parallelism);
            dos.writeInt(operatorState.getMaxParallelism());
            // NOTE(review): constant 1 written here — presumably a legacy field
            // (e.g. chain length) kept for format compatibility; confirm against
            // the matching deserializer before changing.
            dos.writeInt(1);
            // Sub task states, keyed by subtask index
            Map<Integer, OperatorSubtaskState> subtaskStateMap = operatorState.getSubtaskStates();
            dos.writeInt(subtaskStateMap.size());
            for (Map.Entry<Integer, OperatorSubtaskState> entry : subtaskStateMap.entrySet()) {
                dos.writeInt(entry.getKey());
                serializeSubtaskState(entry.getValue(), dos);
            }
        }
    }
}
public class AbstractMailConnectionFactory { /** * This function initializes the connection factory . */ @ Override protected void initializeImpl ( ) { } }
// create properties from configuration ( all javax mail properties will be defined in the fax4j properties ) this . mailConnectionProperties = new Properties ( ) ; Map < String , String > configuration = this . factoryConfigurationHolder . getConfiguration ( ) ; Iterator < Entry < String , String > > iterator = configuration . entrySet ( ) . iterator ( ) ; Entry < String , String > entry = null ; String key = null ; String value = null ; while ( iterator . hasNext ( ) ) { // get next entry entry = iterator . next ( ) ; // get next key key = entry . getKey ( ) ; if ( ( key != null ) && ( key . indexOf ( "org.fax4j." ) == - 1 ) ) // filter out fax4j properties { // get next value value = entry . getValue ( ) ; // put in properties this . mailConnectionProperties . setProperty ( key , value ) ; } } // get user / password values this . userName = this . factoryConfigurationHolder . getConfigurationValue ( FaxClientSpiConfigurationConstants . USER_NAME_PROPERTY_KEY ) ; this . password = this . factoryConfigurationHolder . getConfigurationValue ( FaxClientSpiConfigurationConstants . PASSWORD_PROPERTY_KEY ) ; // get transport protocol this . transportProtocol = this . factoryConfigurationHolder . getConfigurationValue ( "mail.transport.protocol" ) ; if ( this . transportProtocol == null ) { this . transportProtocol = "smtp" ; } // get transport host this . transportHost = this . factoryConfigurationHolder . getConfigurationValue ( "mail.smtp.host" ) ; // get transport port String transportPortStr = this . factoryConfigurationHolder . getConfigurationValue ( "mail.smtp.port" ) ; this . transportPort = - 1 ; if ( transportPortStr != null ) { this . transportPort = Integer . parseInt ( transportPortStr ) ; }
public class ApiOvhDedicatedserver { /** * Retrieve RTM graph values * REST : GET / dedicated / server / { serviceName } / statistics / chart * @ param type [ required ] RTM chart type * @ param period [ required ] chart period * @ param serviceName [ required ] The internal name of your dedicated server */ public OvhChartReturn serviceName_statistics_chart_GET ( String serviceName , OvhRtmChartPeriodEnum period , OvhRtmChartTypeEnum type ) throws IOException { } }
String qPath = "/dedicated/server/{serviceName}/statistics/chart" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "period" , period ) ; query ( sb , "type" , type ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhChartReturn . class ) ;
public class Schema {
    /**
     * Loads the indices for all vertex labels in this schema from the topology graph.
     *
     * Walks schema -> vertex label -> index -> index property paths, creating any
     * missing {@link VertexLabel} and {@link Index} objects and attaching the
     * indexed property columns in their declared sequence order.
     *
     * @param traversalSource traversal source over the topology graph
     * @param schemaVertex    the topology vertex representing this schema
     */
    void loadVertexIndices(GraphTraversalSource traversalSource, Vertex schemaVertex) {
        // one Path per (vertex label, index, indexed property) triple; properties
        // are ordered by their sequence on the index edge
        List<Path> indices = traversalSource.V(schemaVertex)
                .out(SQLG_SCHEMA_SCHEMA_VERTEX_EDGE).as("vertex")
                .out(SQLG_SCHEMA_VERTEX_INDEX_EDGE).as("index")
                .outE(SQLG_SCHEMA_INDEX_PROPERTY_EDGE)
                .order().by(SQLG_SCHEMA_INDEX_PROPERTY_EDGE_SEQUENCE)
                .inV().as("property")
                .path().toList();
        for (Path vertexIndices : indices) {
            Vertex vertexVertex = null;
            Vertex vertexIndex = null;
            Vertex propertyIndex = null;
            // pick the labelled steps out of the path; strategy-internal labels are ignored
            List<Set<String>> labelsList = vertexIndices.labels();
            for (Set<String> labels : labelsList) {
                for (String label : labels) {
                    switch (label) {
                        case "vertex":
                            vertexVertex = vertexIndices.get("vertex");
                            break;
                        case "index":
                            vertexIndex = vertexIndices.get("index");
                            break;
                        case "property":
                            propertyIndex = vertexIndices.get("property");
                            break;
                        case BaseStrategy.SQLG_PATH_FAKE_LABEL:
                        case BaseStrategy.SQLG_PATH_ORDER_RANGE_LABEL:
                        case Schema.MARKER:
                            // internal bookkeeping labels, not part of the data
                            break;
                        default:
                            throw new IllegalStateException(String.format("BUG: Only \"vertex\",\"index\" and \"property\" is expected as a label. Found %s", label));
                    }
                }
            }
            Preconditions.checkState(vertexVertex != null, "BUG: Topology vertex not found.");
            String schemaName = schemaVertex.value(SQLG_SCHEMA_SCHEMA_NAME);
            String tableName = vertexVertex.value(SQLG_SCHEMA_VERTEX_LABEL_NAME);
            // create the VertexLabel lazily if this is the first time we see it
            VertexLabel vertexLabel = this.vertexLabels.get(schemaName + "." + VERTEX_PREFIX + tableName);
            if (vertexLabel == null) {
                vertexLabel = new VertexLabel(this, tableName);
                this.vertexLabels.put(schemaName + "." + VERTEX_PREFIX + tableName, vertexLabel);
            }
            if (vertexIndex != null) {
                String indexName = vertexIndex.value(SQLG_SCHEMA_INDEX_NAME);
                Optional<Index> oidx = vertexLabel.getIndex(indexName);
                // reuse an existing Index or create and register a new one;
                // idx stays effectively final so the lambda below may capture it
                Index idx;
                if (oidx.isPresent()) {
                    idx = oidx.get();
                } else {
                    idx = new Index(indexName, IndexType.fromString(vertexIndex.value(SQLG_SCHEMA_INDEX_INDEX_TYPE)), vertexLabel);
                    vertexLabel.addIndex(idx);
                }
                if (propertyIndex != null) {
                    String propertyName = propertyIndex.value(SQLG_SCHEMA_PROPERTY_NAME);
                    vertexLabel.getProperty(propertyName).ifPresent((PropertyColumn pc) -> idx.addProperty(pc));
                }
            }
        }
    }
}
public class RowColumnOps { /** * Updates the values of row < tt > i < / tt > in the given matrix to be A [ i , : ] = A [ i , : ] + c [ : ] * < tt > t < / tt > . < br > * The Matrix < tt > A < / tt > and array < tt > c < / tt > do not need to have the same dimensions , so long as they both have indices in the given range . * @ param A the matrix to perform he update on * @ param i the row to update * @ param start the first index of the column to update from ( inclusive ) * @ param to the last index of the column to update ( exclusive ) * @ param t the constant to multiply all elements of < tt > c < / tt > by * @ param c the array of values to pairwise multiply by < tt > t < / tt > before adding to the elements of A */ public static void addMultRow ( Matrix A , int i , int start , int to , double t , double [ ] c ) { } }
for ( int j = start ; j < to ; j ++ ) A . increment ( i , j , c [ j ] * t ) ;
public class PippoSettings { /** * Override the setting at runtime with the specified value . * This change does not persist . * @ param name * @ param value */ public void overrideSetting ( String name , long value ) { } }
overrides . put ( name , Long . toString ( value ) ) ;
public class XmlStringTools { /** * Add a cdata section to a StringBuffer . * If the buffer is null , a new one is created . * @ param buffer * StringBuffer to fill * @ param cdataContent * the cdata content * @ return the buffer */ public static StringBuffer appendCdataSection ( StringBuffer buffer , String cdataContent ) { } }
StringBuffer _buffer = initStringBufferIfNecessary ( buffer ) ; return doAppendCdataSection ( _buffer , cdataContent ) ;
public class FnBigDecimal { /** * Determines whether the target object is null or not . * @ return false if the target object is null , true if not . */ public static final Function < BigDecimal , Boolean > isNotNull ( ) { } }
return ( Function < BigDecimal , Boolean > ) ( ( Function ) FnObject . isNotNull ( ) ) ;
public class FileOutputFormat { /** * Get the { @ link Path } to the task ' s temporary output directory * for the map - reduce job * < h4 id = " SideEffectFiles " > Tasks ' Side - Effect Files < / h4 > * < p > < i > Note : < / i > The following is valid only if the { @ link OutputCommitter } * is { @ link FileOutputCommitter } . If < code > OutputCommitter < / code > is not * a < code > FileOutputCommitter < / code > , the task ' s temporary output * directory is same as { @ link # getOutputPath ( JobConf ) } i . e . * < tt > $ { mapred . output . dir } $ < / tt > < / p > * < p > Some applications need to create / write - to side - files , which differ from * the actual job - outputs . * < p > In such cases there could be issues with 2 instances of the same TIP * ( running simultaneously e . g . speculative tasks ) trying to open / write - to the * same file ( path ) on HDFS . Hence the application - writer will have to pick * unique names per task - attempt ( e . g . using the attemptid , say * < tt > attempt _ 200709221812_0001 _ m _ 00000_0 < / tt > ) , not just per TIP . < / p > * < p > To get around this the Map - Reduce framework helps the application - writer * out by maintaining a special * < tt > $ { mapred . output . dir } / _ temporary / _ $ { taskid } < / tt > * sub - directory for each task - attempt on HDFS where the output of the * task - attempt goes . On successful completion of the task - attempt the files * in the < tt > $ { mapred . output . dir } / _ temporary / _ $ { taskid } < / tt > ( only ) * are < i > promoted < / i > to < tt > $ { mapred . output . dir } < / tt > . Of course , the * framework discards the sub - directory of unsuccessful task - attempts . This * is completely transparent to the application . < / p > * < p > The application - writer can take advantage of this by creating any * side - files required in < tt > $ { mapred . work . output . dir } < / tt > during execution * of his reduce - task i . e . 
via { @ link # getWorkOutputPath ( JobConf ) } , and the * framework will move them out similarly - thus she doesn ' t have to pick * unique paths per task - attempt . < / p > * < p > < i > Note < / i > : the value of < tt > $ { mapred . work . output . dir } < / tt > during * execution of a particular task - attempt is actually * < tt > $ { mapred . output . dir } / _ temporary / _ { $ taskid } < / tt > , and this value is * set by the map - reduce framework . So , just create any side - files in the * path returned by { @ link # getWorkOutputPath ( JobConf ) } from map / reduce * task to take advantage of this feature . < / p > * < p > The entire discussion holds true for maps of jobs with * reducer = NONE ( i . e . 0 reduces ) since output of the map , in that case , * goes directly to HDFS . < / p > * @ return the { @ link Path } to the task ' s temporary output directory * for the map - reduce job . */ public static Path getWorkOutputPath ( JobConf conf ) { } }
String name = conf . get ( "mapred.work.output.dir" ) ; return name == null ? null : new Path ( name ) ;
public class DiskFileItem {
    /**
     * Returns the content charset passed by the agent, or <code>null</code> if
     * none was supplied.
     *
     * @return the charset parameter of the Content-Type header, or <code>null</code>
     */
    @Nullable
    public String getCharSet() {
        // the parameter parser tolerates a null content type
        final ParameterParser parser = new ParameterParser().setLowerCaseNames(true);
        final ICommonsMap<String, String> parameters = parser.parse(getContentType(), ';');
        return parameters.get("charset");
    }
}
public class PipelineApi {
    /**
     * Gets a Stream of pipelines in a project.
     *
     * <pre><code>GitLab Endpoint: GET /projects/:id/pipelines</code></pre>
     *
     * @param projectIdOrPath the project in the form of an Integer (ID), String (path), or Project instance
     * @return a Stream containing the pipelines for the specified project ID
     * @throws GitLabApiException if any exception occurs during execution
     */
    public Stream<Pipeline> getPipelinesStream(Object projectIdOrPath) throws GitLabApiException {
        // delegate to the paged lookup and expose the result as a stream
        return (getPipelines(projectIdOrPath, getDefaultPerPage()).stream());
    }
}
public class ClientVpnEndpoint { /** * Information about the associated target networks . A target network is a subnet in a VPC . * @ param associatedTargetNetworks * Information about the associated target networks . A target network is a subnet in a VPC . */ @ Deprecated public void setAssociatedTargetNetworks ( java . util . Collection < AssociatedTargetNetwork > associatedTargetNetworks ) { } }
if ( associatedTargetNetworks == null ) { this . associatedTargetNetworks = null ; return ; } this . associatedTargetNetworks = new com . amazonaws . internal . SdkInternalList < AssociatedTargetNetwork > ( associatedTargetNetworks ) ;
public class PebbleEngineFactory { /** * Return a Template Loader based on the given Template Loader list . * If more than one Template Loader has been registered , a DelegatingLoader needs to be created . * @ param templateLoaders the final List of TemplateLoader instances * @ return the aggregate TemplateLoader */ protected Loader < ? > getAggregateTemplateLoader ( Loader < ? > [ ] templateLoaders ) { } }
int loaderCount = ( templateLoaders == null ) ? 0 : templateLoaders . length ; switch ( loaderCount ) { case 0 : // Register default template loaders . Loader < ? > stringLoader = new StringLoader ( ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Pebble Engine Template Loader not specified. Default Template Loader registered: " + stringLoader ) ; } return stringLoader ; case 1 : if ( log . isDebugEnabled ( ) ) { log . debug ( "One Pebble Engine Template Loader registered: " + templateLoaders [ 0 ] ) ; } return templateLoaders [ 0 ] ; default : List < Loader < ? > > defaultLoadingStrategies = new ArrayList < > ( ) ; Collections . addAll ( defaultLoadingStrategies , templateLoaders ) ; Loader < ? > delegatingLoader = new DelegatingLoader ( defaultLoadingStrategies ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Multiple Pebble Engine Template Loader registered: " + delegatingLoader ) ; } return delegatingLoader ; }
public class InlineHintService {
    /**
     * Returns the total effort points of all {@link InlineHintModel}s associated
     * with the {@link FileModel}s in the given {@link ProjectModelTraversal},
     * optionally recursing into child projects.
     *
     * @return a map from effort level to the number of incidents at that level
     */
    public Map<Integer, Integer> getMigrationEffortByPoints(ProjectModelTraversal traversal, Set<String> includeTags, Set<String> excludeTags, Set<String> issueCategoryIDs, boolean recursive, boolean includeZero) {
        // accumulator keys each report vertex by its effort value, so equal-effort
        // incidents are summed together
        // NOTE(review): the anonymous class instantiates the raw type; consider
        // new MapSumEffortAccumulatorFunction<Integer>() to avoid the unchecked warning.
        MapSumEffortAccumulatorFunction<Integer> accumulator = new MapSumEffortAccumulatorFunction() {
            public Object vertexToKey(Vertex effortReportVertex) {
                Integer migrationEffort = (Integer) effortReportVertex.property(EffortReportModel.EFFORT).value();
                return migrationEffort;
            }
        };
        getMigrationEffortDetails(traversal, includeTags, excludeTags, issueCategoryIDs, recursive, includeZero, accumulator);
        return accumulator.getResults();
    }
}
public class BpmnDeploymentHelper { /** * Updates all the process definition entities to have the correct resource names . */ public void setResourceNamesOnProcessDefinitions ( ParsedDeployment parsedDeployment ) { } }
for ( ProcessDefinitionEntity processDefinition : parsedDeployment . getAllProcessDefinitions ( ) ) { String resourceName = parsedDeployment . getResourceForProcessDefinition ( processDefinition ) . getName ( ) ; processDefinition . setResourceName ( resourceName ) ; }
public class MediaPanel {
    /**
     * Advances the panel's animation state by one tick (from interface
     * FrameParticipant). Does nothing while the meta manager is paused.
     */
    public void tick(long tickStamp) {
        // skip the whole tick while paused
        if (_metamgr.isPaused()) {
            return;
        }
        // let derived classes do their business
        willTick(tickStamp);
        // tick our meta manager which will tick our sprites and animations
        _metamgr.tick(tickStamp);
        // let derived classes do their business
        didTick(tickStamp);
        // make a note that the next paint will correspond to a call to tick()
        _tickPaintPending = true;
    }
}
public class RegionDiskClient { /** * Returns a specified regional persistent disk . * < p > Sample code : * < pre > < code > * try ( RegionDiskClient regionDiskClient = RegionDiskClient . create ( ) ) { * ProjectRegionDiskName disk = ProjectRegionDiskName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ DISK ] " ) ; * Disk response = regionDiskClient . getRegionDisk ( disk ) ; * < / code > < / pre > * @ param disk Name of the regional persistent disk to return . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Disk getRegionDisk ( ProjectRegionDiskName disk ) { } }
GetRegionDiskHttpRequest request = GetRegionDiskHttpRequest . newBuilder ( ) . setDisk ( disk == null ? null : disk . toString ( ) ) . build ( ) ; return getRegionDisk ( request ) ;
public class CommerceAccountUserRelPersistenceImpl {
    /**
     * Caches the commerce account user rel in the entity cache if it is enabled.
     *
     * @param commerceAccountUserRel the commerce account user rel
     */
    @Override
    public void cacheResult(CommerceAccountUserRel commerceAccountUserRel) {
        entityCache.putResult(CommerceAccountUserRelModelImpl.ENTITY_CACHE_ENABLED, CommerceAccountUserRelImpl.class, commerceAccountUserRel.getPrimaryKey(), commerceAccountUserRel);
        // the cached copy is now the baseline for dirty-checking
        commerceAccountUserRel.resetOriginalValues();
    }
}
public class JanusConfig {
    /**
     * Replies the value of the enumeration system property.
     *
     * @param <S>  type of the enumeration to read
     * @param type type of the enumeration
     * @param name name of the property
     * @return the value, or <code>null</code> if no property found
     */
    public static <S extends Enum<S>> S getSystemPropertyAsEnum(Class<S> type, String name) {
        // delegate to the three-argument overload with a null default
        return getSystemPropertyAsEnum(type, name, null);
    }
}
public class FileWindow {
    /**
     * Called when the text of the script has changed: refreshes the text area
     * (preserving the caret position where possible) and the file header.
     */
    public void updateText(Dim.SourceInfo sourceInfo) {
        this.sourceInfo = sourceInfo;
        String newText = sourceInfo.source();
        // only touch the widget when the content actually differs
        if (!textArea.getText().equals(newText)) {
            textArea.setText(newText);
            // restore the previous caret position, falling back to the start
            int pos = 0;
            if (currentPos != -1) {
                pos = currentPos;
            }
            textArea.select(pos);
        }
        fileHeader.update();
        fileHeader.repaint();
    }
}
public class Properties {
    /**
     * Saves all properties to the backing file.
     *
     * @param properties the {@link Map} with all stored deeper nested {@link Map}s
     *                   and/or values
     * @throws PropertyException if the file could not be opened or created, or
     *                           points to a directory
     */
    public void save(Map<String, Object> properties) throws PropertyException {
        try {
            mWriter.save(properties);
        } catch (IOException e) {
            // NOTE(review): only the message is propagated, so the original stack
            // trace is lost; if PropertyException has a (String, Throwable)
            // constructor, chain `e` here.
            throw new PropertyException(e.getMessage());
        }
    }
}
public class AmazonDaxClient {
    /**
     * Creates a new subnet group.
     *
     * @param request the CreateSubnetGroup request
     * @return result of the CreateSubnetGroup operation returned by the service
     * @throws SubnetGroupAlreadyExistsException if the specified subnet group already exists
     * @throws SubnetGroupQuotaExceededException if the allowed number of subnet groups would be exceeded
     * @throws SubnetQuotaExceededException if the allowed number of subnets in a subnet group would be exceeded
     * @throws InvalidSubnetException if an invalid subnet identifier was specified
     * @throws ServiceLinkedRoleNotFoundException if the service-linked role is missing
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dax-2017-04-19/CreateSubnetGroup" target="_top">AWS API Documentation</a>
     */
    @Override
    public CreateSubnetGroupResult createSubnetGroup(CreateSubnetGroupRequest request) {
        // apply request handlers/validation before dispatching the call
        request = beforeClientExecution(request);
        return executeCreateSubnetGroup(request);
    }
}
public class CertHelper {
    /**
     * Creates a custom {@link HostnameVerifier} that allows a specific certificate
     * to be accepted for a mismatching hostname.
     *
     * @param requestHostname   hostname used to access the service which offers the
     *                          incorrectly named certificate
     * @param certPrincipalName RFC 2253 name on the certificate
     * @return a {@link HostnameVerifier} that accepts exactly that hostname/principal
     *         pair, deferring to the default verifier for everything else
     */
    public static HostnameVerifier createIncorrectHostnameVerifier(final String requestHostname, final String certPrincipalName) {
        return new HostnameVerifier() {
            @Override
            public boolean verify(String hostname, SSLSession session) {
                try {
                    String principalName = session.getPeerPrincipal().getName();
                    // accept only the explicitly whitelisted hostname/principal pair
                    if (hostname.equals(requestHostname) && principalName.equals(certPrincipalName))
                        return true;
                } catch (SSLPeerUnverifiedException e) {
                    // intentionally ignored: an unverified peer simply falls
                    // through to the default verifier below
                }
                return HttpsURLConnection.getDefaultHostnameVerifier().verify(hostname, session);
            }
        };
    }
}
public class LocalisationManager {
    /**
     * Updates the localisation sets of a destination: replaces the queue-point
     * guess set with the given ME uuids, recomputes the local/remote flags, and
     * rebuilds the remote guess set (all queue points minus the local ME).
     *
     * @param messagingEngineUuid the local messaging engine's uuid
     * @param newQueuePointLocalisingMEUuids set of ME uuid strings now localising the queue point
     */
    public void updateLocalisationSet(SIBUuid8 messagingEngineUuid, Set newQueuePointLocalisingMEUuids) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "updateLocalisationSet", new Object[] { messagingEngineUuid, newQueuePointLocalisingMEUuids });
        // If the Queue points are not null, then synchronize (Queue destination)
        if (_queuePointsGuessSet != null) {
            // Rebuild the queue-point guess set from the new localisations
            synchronized (_queuePointsGuessSet) {
                _queuePointsGuessSet.clear();
                _hasLocal = false;
                _hasRemote = false;
                Iterator i = newQueuePointLocalisingMEUuids.iterator();
                while (i.hasNext()) {
                    SIBUuid8 meUuid = new SIBUuid8((String) i.next());
                    _queuePointsGuessSet.add(meUuid);
                    // track whether any localisation is on this ME, and whether any is elsewhere
                    if (meUuid.equals(messagingEngineUuid)) {
                        _hasLocal = true;
                    } else {
                        _hasRemote = true;
                    }
                }
            }
        }
        // If the Remote Queue points are not null, then synchronize (Queue destination)
        if (_remoteQueuePointsGuessSet != null) {
            synchronized (_remoteQueuePointsGuessSet) {
                // remote set = all queue points except the local ME; cloned so the
                // swap is atomic with respect to readers of the reference
                HashSet<SIBUuid8> temp = (HashSet<SIBUuid8>) _queuePointsGuessSet.clone();
                synchronized (temp) {
                    _remoteQueuePointsGuessSet = temp;
                    _remoteQueuePointsGuessSet.remove(messagingEngineUuid);
                }
            }
        }
        // publish the new localisation picture to TRM
        updateTrmAdvertisements();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "updateLocalisationSet");
    }
}
public class FSPathAlterationListenerAdaptor { /** * Transform the event triggered by file creation into JobSpec Creation for Driver ( One of the JobCatalogListener ) * Create a new JobSpec object and notify each of member inside JobCatalogListenersList * @ param rawPath This could be complete path to the newly - created configuration file . */ @ Override public void onFileCreate ( Path rawPath ) { } }
try { JobSpec newJobSpec = this . converter . apply ( loader . loadPullFile ( rawPath , sysConfig , false ) ) ; listeners . onAddJob ( newJobSpec ) ; } catch ( IOException e ) { throw new RuntimeException ( e . getMessage ( ) ) ; }
public class BaseLevel1 {
    /**
     * Computes the sum of magnitudes of all vector elements (or, for a complex
     * vector x, the sum of magnitudes), dispatching to the sparse wrapper or the
     * dtype-specific dense BLAS routine.
     *
     * @param arr the vector to reduce
     * @return the absolute-value sum
     */
    @Override
    public double asum(INDArray arr) {
        // sparse arrays go through the dedicated sparse BLAS wrapper
        if (arr.isSparse()) {
            return Nd4j.getSparseBlasWrapper().level1().asum(arr);
        }
        if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL)
            OpProfiler.getInstance().processBlasCall(false, arr);
        // dispatch on the buffer dtype: double, float, or (fallback) half
        if (arr.data().dataType() == DataType.DOUBLE) {
            DefaultOpExecutioner.validateDataType(DataType.DOUBLE, arr);
            return dasum(arr.length(), arr, BlasBufferUtil.getBlasStride(arr));
        } else if (arr.data().dataType() == DataType.FLOAT) {
            DefaultOpExecutioner.validateDataType(DataType.FLOAT, arr);
            return sasum(arr.length(), arr, BlasBufferUtil.getBlasStride(arr));
        } else {
            DefaultOpExecutioner.validateDataType(DataType.HALF, arr);
            return hasum(arr.length(), arr, BlasBufferUtil.getBlasStride(arr));
        }
    }
}
public class ClassDescriptor {
    /**
     * Adds a {@link CollectionDescriptor} and back-links it to this class descriptor.
     */
    public void addCollectionDescriptor(CollectionDescriptor cod) {
        m_CollectionDescriptors.add(cod);
        cod.setClassDescriptor(this); // BRJ
        // invalidate the cached by-name lookup map; it is rebuilt lazily
        m_collectionDescriptorNameMap = null;
    }
}
public class ObjectEncoder {
    /**
     * Returns whether {@code values} contains {@code value}.
     * Null-safe: a null element and/or a null search value are compared with
     * {@link java.util.Objects#equals(Object, Object)} instead of throwing.
     *
     * @param values array to search (must not be null)
     * @param value  value to look for (may be null)
     * @return true if some element of {@code values} equals {@code value}
     */
    static boolean contains(String[] values, String value) {
        for (String candidate : values) {
            // Objects.equals avoids the NPE the previous str.equals(value) threw
            // when the array contained a null element
            if (java.util.Objects.equals(candidate, value)) {
                return true;
            }
        }
        return false;
    }
}
public class BProgram {
    /**
     * Adds an object to the program's global scope. JS code can reference the
     * added object by {@code name}.
     *
     * @param name the name under which {@code obj} will be available to the JS code
     * @param obj  the object to be added to the program's scope
     */
    public void putInGlobalScope(String name, Object obj) {
        if (getGlobalScope() == null) {
            // scope not created yet: stash the value to be installed later
            initialScopeValues.put(name, obj);
        } else {
            try {
                // a Rhino Context must be entered before touching the scope,
                // and exited in the finally regardless of outcome
                Context.enter();
                getGlobalScope().put(name, programScope, Context.javaToJS(obj, programScope));
            } finally {
                Context.exit();
            }
        }
    }
}
public class GenericDao { /** * Deletes an entity from the database . It calls * { @ link EntityManager # remove ( java . lang . Object ) } and throws the same * exceptions . * @ param entity the entity to delete . * @ return the deleted entity . */ @ Override public T remove ( T entity ) { } }
EntityManager entityManager = getEntityManager ( ) ; if ( entityManager . contains ( entity ) ) { entityManager . remove ( entity ) ; } else { entityManager . remove ( entityManager . merge ( entity ) ) ; } return entity ;
public class TinyPlugz {
    /**
     * Undeploys the global {@link TinyPlugz} instance and calls its
     * {@link #dispose()} method. Fails if the instance on which it is called is
     * not the currently deployed one.
     */
    public final void undeploy() {
        // same lock as deployment, so deploy/undeploy cannot interleave
        synchronized (TinyPlugzConfigurator.DEPLOY_LOCK) {
            Require.state(isDeployed(), "Can not undeploy TinyPlugz: no instance deployed");
            final TinyPlugz plugz = instance;
            Require.state(plugz == this, "Undeploy called on an instance which was not the deployed one");
            // clear the global reference before disposing, so a failing
            // dispose() still leaves nothing deployed
            instance = null;
            plugz.dispose();
        }
    }
}
public class IOUtils {
    /**
     * Writes a byte[] to a file, replacing any existing content.
     *
     * @param bytes the bytes to write
     * @param f     the destination file
     * @return true on success, false if any error occurred (the error is
     *         reported on stdout, matching the original behavior)
     */
    public static boolean writeBytesToFile(byte[] bytes, File f) {
        // try-with-resources guarantees the stream is closed even when
        // write() throws — the previous version leaked the stream on failure
        try (FileOutputStream fout = new FileOutputStream(f)) {
            fout.write(bytes);
        } catch (Exception e) {
            System.out.println("Error writing byte[] to file: " + e.getMessage());
            return false;
        }
        return true;
    }
}
public class AirlineBoardingPassTemplateBuilder {
    /**
     * Adds a {@link QuickReply} to the current object.
     *
     * @param title   the quick reply button label; it can't be empty
     * @param payload the payload sent back when the button is pressed; it can't be empty
     * @return this builder, for chaining
     * @see <a href="https://developers.facebook.com/docs/messenger-platform/send-api-reference/quick-replies">Facebook's Messenger Platform Quick Replies Documentation</a>
     */
    public AirlineBoardingPassTemplateBuilder addQuickReply(String title, String payload) {
        // delegate to the underlying message builder, keeping this builder fluent
        this.messageBuilder.addQuickReply(title, payload);
        return this;
    }
}
public class Utils {
    /**
     * Creates an intent for emailing attendees of an event.
     *
     * @param resources    the resources for translating strings
     * @param eventTitle   the title of the event to use as the email subject
     * @param body         the default text for the email body
     * @param toEmails     the list of emails for the 'to' line
     * @param ccEmails     the list of emails for the 'cc' line
     * @param ownerAccount the owner account to use as the email sender
     */
    public static Intent createEmailAttendeesIntent(Resources resources, String eventTitle, String body, List<String> toEmails, List<String> ccEmails, String ownerAccount) {
        List<String> toList = toEmails;
        List<String> ccList = ccEmails;
        if (toEmails.size() <= 0) {
            if (ccEmails.size() <= 0) {
                // TODO: Return a SEND intent if no one to email to, to at least populate
                // a draft email with the subject (and no recipients).
                throw new IllegalArgumentException("Both toEmails and ccEmails are empty.");
            }
            // Email app does not work with no "to" recipient. Move all 'cc' to 'to'
            // in this case.
            toList = ccEmails;
            ccList = null;
        }
        // Use the event title as the email subject (prepended with 'Re: ').
        String subject = null;
        if (eventTitle != null) {
            subject = resources.getString(R.string.email_subject_prefix) + eventTitle;
        }
        // Use the SENDTO intent with a 'mailto' URI, because using SEND would cause
        // the picker to show apps like text messaging, which does not make sense
        // for email addresses. All data goes in the URI instead of the extra Intent
        // fields (i.e. EXTRA_CC, etc.) because some email apps might not handle
        // those (though gmail does).
        Uri.Builder uriBuilder = new Uri.Builder();
        uriBuilder.scheme("mailto");
        // The first email is appended to the 'mailto' field later (because the
        // current state of the Email app requires it). Add the remaining 'to'
        // values here.
        if (toList.size() > 1) {
            for (int i = 1; i < toList.size(); i++) {
                // The Email app requires repeated parameter settings instead of
                // a single comma-separated list.
                uriBuilder.appendQueryParameter("to", toList.get(i));
            }
        }
        // Add the subject parameter.
        if (subject != null) {
            uriBuilder.appendQueryParameter("subject", subject);
        }
        // Add the body parameter.
        if (body != null) {
            uriBuilder.appendQueryParameter("body", body);
        }
        // Add the cc parameters.
        if (ccList != null && ccList.size() > 0) {
            for (String email : ccList) {
                uriBuilder.appendQueryParameter("cc", email);
            }
        }
        // Insert the first email after 'mailto:' in the URI manually since
        // Uri.Builder doesn't seem to have a way to do this.
        String uri = uriBuilder.toString();
        if (uri.startsWith("mailto:")) {
            StringBuilder builder = new StringBuilder(uri);
            builder.insert(7, Uri.encode(toList.get(0)));
            uri = builder.toString();
        }
        // Start the email intent. Email from the account of the calendar owner in
        // case there are multiple email accounts.
        Intent emailIntent = new Intent(Intent.ACTION_SENDTO, Uri.parse(uri));
        emailIntent.putExtra("fromAccountString", ownerAccount);
        // Workaround an Email bug that overwrites the body with this intent extra.
        // If not set, it clears the body.
        if (body != null) {
            emailIntent.putExtra(Intent.EXTRA_TEXT, body);
        }
        return Intent.createChooser(emailIntent, resources.getString(R.string.email_picker_label));
    }
}
public class UnsafeMappedBuffer { /** * Allocates a mapped buffer . * Memory will be mapped by opening and expanding the given { @ link java . io . File } to the desired { @ code count } and mapping the * file contents into memory via { @ link java . nio . channels . FileChannel # map ( java . nio . channels . FileChannel . MapMode , long , long ) } . * The resulting buffer will have a capacity of { @ code initialCapacity } . The underlying { @ link UnsafeMappedBytes } will be * initialized to the next power of { @ code 2 } . As bytes are written to the buffer , the buffer ' s capacity will double * as long as { @ code maxCapacity > capacity } . * @ param file The file to map into memory . If the file doesn ' t exist it will be automatically created . * @ param initialCapacity The initial capacity of the buffer . * @ param maxCapacity The maximum capacity of the buffer . * @ return The mapped buffer . * @ throws NullPointerException If { @ code file } is { @ code null } * @ throws IllegalArgumentException If the { @ code capacity } or { @ code maxCapacity } is greater than * { @ link Integer # MAX _ VALUE } . * @ see UnsafeMappedBuffer # allocate ( java . io . File ) * @ see UnsafeMappedBuffer # allocate ( java . io . File , java . nio . channels . FileChannel . MapMode ) * @ see UnsafeMappedBuffer # allocate ( java . io . File , long ) * @ see UnsafeMappedBuffer # allocate ( java . io . File , java . nio . channels . FileChannel . MapMode , long ) * @ see UnsafeMappedBuffer # allocate ( java . io . File , java . nio . channels . FileChannel . MapMode , long , long ) */ public static UnsafeMappedBuffer allocate ( File file , long initialCapacity , long maxCapacity ) { } }
// Delegate to the fully-specified overload, supplying the allocator's default
// FileChannel map mode. All capacity validation happens in the delegate.
return allocate(file, MappedMemoryAllocator.DEFAULT_MAP_MODE, initialCapacity, maxCapacity);
public class CmsJspTagEnableAde { /** * Returns the preview mode include . < p > * @ param buttonLeft the button left parameter * @ param titleMessage the title attribute of the " Editor mode " button rendered by the include * @ return the preview mode include */ private static String getPreviewInclude ( String buttonLeft , String titleMessage ) { } }
// Build the preview-mode include: font-icon CSS import followed by the
// preview script. StringBuilder replaces StringBuffer — the buffer is a
// method-local with no shared access, so synchronization is pure overhead.
StringBuilder buffer = new StringBuilder();
// Pull in the font icon CSS so the rendered "Editor mode" button displays its icon.
buffer.append("<style type=\"text/css\"> @import url(\"").append(CmsGwtActionElement.getFontIconCssLink()).append("\"); </style>\n");
// PREVIEW_INCLUDE_SCRIPT is a format template taking the button-left parameter
// and the button title text, in that order.
buffer.append(String.format(PREVIEW_INCLUDE_SCRIPT, buttonLeft, titleMessage));
return buffer.toString();
public class JSONHelpers { /** * Load a JSON file from one of the public directories defined by { @ link Environment } . * @ param publicDirectory One of the { @ code DIRECTORY _ * } constants defined by { @ code Environment } . * @ param file Relative path to file in the public directory . * @ return New instance of { @ link JSONObject } */ public static JSONObject loadPublicJSONFile ( final String publicDirectory , final String file ) { } }
final File dir = Environment . getExternalStoragePublicDirectory ( publicDirectory ) ; return loadJSONFile ( dir , file ) ;
public class Element { /** * Find elements that have this class , including or under this element . Case insensitive . * Elements can have multiple classes ( e . g . { @ code < div class = " header round first " > } . This method * checks each class , so you can find the above with { @ code el . getElementsByClass ( " header " ) ; } . * @ param className the name of the class to search for . * @ return elements with the supplied class name , empty if none * @ see # hasClass ( String ) * @ see # classNames ( ) */ public Elements getElementsByClass ( String className ) { } }
Validate . notEmpty ( className ) ; return Collector . collect ( new Evaluator . Class ( className ) , this ) ;
public class AnimatedDrawableValueAnimatorHelper { /** * Create a value animator for the given animation drawable and max animation duration in ms . * @ param drawable the drawable to create the animator for * @ param maxDurationMs the max duration in ms * @ return the animator to use */ @ Nullable public static ValueAnimator createValueAnimator ( Drawable drawable , int maxDurationMs ) { } }
if ( Build . VERSION . SDK_INT < Build . VERSION_CODES . HONEYCOMB ) { return null ; } if ( drawable instanceof AnimatedDrawable2 ) { return AnimatedDrawable2ValueAnimatorHelper . createValueAnimator ( ( AnimatedDrawable2 ) drawable , maxDurationMs ) ; } return null ;
public class Message { /** * Encodes this message . The protocol is first 4 bytes are length of this command followed by the byte stream of command * @ param txBuffer */ protected void encode ( ByteBuffer txBuffer ) { } }
// Wire layout: bytes [0..3] = int length, bytes [4..] = command payload.
// NOTE(review): the length written below is the TOTAL encoded size including
// the 4-byte header itself — confirm the decoder expects that, since the
// javadoc only says "length of this command". Byte order is the buffer's
// current order (ByteBuffer defaults to big-endian unless changed upstream).
txBuffer.position(4); // Length int
txBuffer.put(data);
// Position now marks the end of header + payload, i.e. the total size.
int length = txBuffer.position();
// Jump back to the start, patch in the length header, then restore the
// position to just past the payload so callers can continue appending.
txBuffer.rewind();
txBuffer.putInt(length);
txBuffer.position(length);
public class ParagraphVectors { /** * This method calculates inferred vector for given document , with default parameters for learning rate and iterations * @ param document * @ return */ public INDArray inferVector ( LabelledDocument document ) { } }
// Delegate to the fully-parameterized overload using this model's configured
// defaults; total training steps = epochs * iterations-per-epoch.
// NOTE(review): assumes learningRate holds the current starting learning
// rate for inference — confirm against the overload's contract.
return inferVector(document, this.learningRate.get(), this.minLearningRate, this.numEpochs * this.numIterations);
public class MonetizationApi { /** * Define devicetype & # 39 ; s pricing tiers . * Define devicetype & # 39 ; s pricing tiers . * @ param dtid DeviceType ID ( required ) * @ param pricingTierInfo Pricing tier info ( required ) * @ return DeviceTypePricingTier * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public DeviceTypePricingTier createPricingTiers ( String dtid , DeviceTypePricingTier pricingTierInfo ) throws ApiException { } }
ApiResponse < DeviceTypePricingTier > resp = createPricingTiersWithHttpInfo ( dtid , pricingTierInfo ) ; return resp . getData ( ) ;
public class RequestHeaderX509CertificateExtractor { /** * Get certificate from header or return null . * HTTPD mod _ header writes " ( null ) " when the ssl variable is not filled * so that is treated as if the header were not present or blank . * @ param request HTTP request object * @ return Base64 encoded certificate */ private String getCertFromHeader ( final HttpServletRequest request ) { } }
final String headerValue = request.getHeader(sslClientCertHeader);
// A missing/blank header, or HTTPD mod_header's literal "(null)" placeholder
// (written when the SSL variable is unset), both mean "no certificate".
if (StringUtils.isBlank(headerValue) || "(null)".equalsIgnoreCase(headerValue)) {
    return null;
}
return StringUtils.trim(headerValue);
public class ChemObject { /** * { @ inheritDoc } */ @ Override public void setFlag ( int mask , boolean value ) { } }
// Only single-bit masks within the CDK constant range are legal flags;
// anything else indicates misuse of the flag API for custom properties.
if (mask > Short.MAX_VALUE || !isPowerOfTwo(mask)) {
    throw new IllegalArgumentException("setFlag() must be provided a valid CDKConstant and not used for custom properties");
}
if (value) {
    flags |= mask; // raise the bit
} else {
    flags &= ~mask; // clear the bit
}
// Inform listeners that this object's state changed.
notifyChanged();
public class OptionsApi { /** * Add / Change / Delete options . * The PUT operation will add , change or delete values in CloudCluster / Options . * @ param body Body Data ( required ) * @ return OptionsPutResponseStatusSuccess * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public OptionsPutResponseStatusSuccess optionsPut ( OptionsPut body ) throws ApiException { } }
ApiResponse < OptionsPutResponseStatusSuccess > resp = optionsPutWithHttpInfo ( body ) ; return resp . getData ( ) ;
public class RSAUtils { /** * 私钥加密 * @ param data 源数据 * @ param privateKey 私钥 ( BASE64编码 ) * @ return * @ throws Exception */ public static byte [ ] encryptByPrivateKey ( byte [ ] data , String privateKey ) throws Exception { } }
// Decode the BASE64-encoded PKCS#8 private key straight to bytes.
// FIX: the previous code decoded to a String and re-encoded it via
// String.getBytes() with the platform default charset, which corrupts the
// binary DER key material on any non-identity charset mapping.
byte[] keyBytes = java.util.Base64.getDecoder().decode(privateKey);
PKCS8EncodedKeySpec pkcs8KeySpec = new PKCS8EncodedKeySpec(keyBytes);
KeyFactory keyFactory = KeyFactory.getInstance(KEY_ALGORITHM);
Key privateK = keyFactory.generatePrivate(pkcs8KeySpec);
// NOTE(review): Cipher.getInstance with the bare algorithm name uses the
// provider's default mode/padding — confirm this matches the decrypting side.
Cipher cipher = Cipher.getInstance(keyFactory.getAlgorithm());
cipher.init(Cipher.ENCRYPT_MODE, privateK);
int inputLen = data.length;
ByteArrayOutputStream out = new ByteArrayOutputStream();
int offSet = 0;
int i = 0;
// 对数据分段加密 — encrypt the data in chunks (RSA limits the input block size).
setData(data, cipher, inputLen, out, offSet, i);
byte[] encryptedData = out.toByteArray();
out.close();
return encryptedData;
public class DefaultBambooClient { /** * Grabs changeset information for the given build . * @ param build a Build * @ param buildJson the build JSON object */ private void addChangeSets ( Build build , JSONObject buildJson ) { } }
// Bamboo nests commit data under "changes" -> "change" (an array of entries).
JSONObject changeSet = (JSONObject) buildJson.get("changes");
// (Removed: long-dead commented-out "revisionToUrl" mapping code that was
// never completed — the revision-to-module map is not provided reliably by
// the Bamboo API and was unused.)
for (Object item : getJsonArray(changeSet, "change")) {
    JSONObject jsonItem = (JSONObject) item;
    // Translate one Bamboo change entry into an SCM commit record.
    SCM scm = new SCM();
    scm.setScmAuthor(getString(jsonItem, "author"));
    scm.setScmCommitLog(getString(jsonItem, "comment"));
    scm.setScmCommitTimestamp(getCommitTimestamp(jsonItem));
    scm.setScmRevisionNumber(getRevision(jsonItem));
    scm.setScmUrl(getString(jsonItem, "commitUrl"));
    // Change count = number of files touched ("files" -> "file" array).
    scm.setNumberOfChanges(getJsonArray((JSONObject) jsonItem.get("files"), "file").size());
    build.getSourceChangeSet().add(scm);
}
public class FacetHandler { /** * ( non - Javadoc ) * @ see javax . faces . view . facelets . FaceletHandler # apply ( javax . faces . view . facelets . FaceletContext , javax . faces . component . UIComponent ) */ public void apply ( FaceletContext ctx , UIComponent parent ) throws IOException , FacesException , FaceletException , ELException { } }
// A facet must be attached to an existing component.
if (parent == null) {
    throw new TagException(this.tag, "Parent UIComponent was null");
}
// Publish the facet name on the parent so nested handlers can see which
// facet they are populating, then always clear it again — even when a
// nested handler throws.
parent.getAttributes().put(KEY, this.name.getValue(ctx));
try {
    this.nextHandler.apply(ctx, parent);
} finally {
    parent.getAttributes().remove(KEY);
}
public class LdapIdentityStore { /** * Get the caller ' s full distinguished name ( DN ) . The DN can be returned in one of the following ways : * < ul > * < li > Using the callerSearchBase , caller ' s name and the callerBaseDn to form the DN . < / li > * < li > Search in LDAP for the user and returning the DN from the LDAP entry . < / li > * < / ul > * @ param callerName The caller ' s name . * @ param filter The filter to search for the caller . * @ param controls The { @ link SearchControls } object . * @ return The user ' s DN . */ private String getUserDn ( String callerName , String filter , SearchControls controls ) { } }
String userDn = null;
String searchBase = idStoreDefinition.getCallerSearchBase();
if (searchBase == null || searchBase.isEmpty()) {
    // No search base configured: compose the DN directly as
    // <callerNameAttribute>=<callerName>,<callerBaseDn>.
    userDn = idStoreDefinition.getCallerNameAttribute() + "=" + callerName + "," + idStoreDefinition.getCallerBaseDn();
} else {
    DirContext ctx = null;
    try {
        ctx = bind();
    } catch (NamingException e) {
        Tr.error(tc, "JAVAEESEC_ERROR_EXCEPTION_ON_BIND", new Object[] { this.idStoreDefinition.getBindDn(), e });
        throw new IllegalStateException(e);
    }
    try {
        if (tc.isDebugEnabled()) {
            Tr.debug(tc, "JNDI_CALL search", new Object[] { searchBase, filter, printControls(controls) });
        }
        NamingEnumeration<SearchResult> ne = ctx.search(new LdapName(searchBase), filter, controls);
        if (ne.hasMoreElements()) {
            userDn = ne.nextElement().getNameInNamespace();
            if (ne.hasMoreElements()) {
                // Ambiguous match: more than one entry for this caller — refuse to pick one.
                Tr.warning(tc, "JAVAEESEC_WARNING_MULTI_CALLER_LDAP", new Object[] { callerName, filter, searchBase });
                return null;
            }
        }
    } catch (NamingException e) {
        Tr.error(tc, "JAVAEESEC_ERROR_EXCEPTION_ON_SEARCH", new Object[] { callerName, filter, searchBase, e });
        throw new IllegalStateException(e);
    } finally {
        // FIX: the directory context was previously leaked on every path
        // (including the early "return null" above); always release it.
        try {
            ctx.close();
        } catch (NamingException e) {
            // Best-effort close; the lookup outcome is unaffected.
        }
    }
}
return userDn;