signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CmsSystemConfiguration { /** * Sets the shell server options from the confriguration . < p > * @ param enabled the value of the ' enabled ' attribute * @ param portStr the value of the ' port ' attribute */ public void setShellServerOptions ( String enabled , String portStr ) { } }
int port ; try { port = Integer . parseInt ( portStr ) ; } catch ( NumberFormatException e ) { port = CmsRemoteShellConstants . DEFAULT_PORT ; } m_shellServerOptions = new CmsRemoteShellConfiguration ( Boolean . parseBoolean ( enabled ) , port ) ;
public class AgentSession {

    /**
     * Returns the generic metadata of the workgroup the agent belongs to.
     *
     * @param con the XMPPConnection to use.
     * @param query an optional query object used to tell the server what metadata to retrieve. This can be null.
     * @return the settings for the workgroup.
     * @throws XMPPErrorException if an error occurs while sending the request to the server.
     * @throws NoResponseException if no response is received from the server.
     * @throws NotConnectedException if the connection is not connected.
     * @throws InterruptedException if the calling thread is interrupted.
     */
    public GenericSettings getGenericSettings(XMPPConnection con, String query)
            throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
        // NOTE(review): both the 'con' and 'query' parameters are ignored here; the request is
        // sent over the session's own 'connection' field -- confirm whether that is intentional.
        GenericSettings setting = new GenericSettings();
        setting.setType(IQ.Type.get);
        setting.setTo(workgroupJID);
        GenericSettings response = connection.createStanzaCollectorAndSend(setting).nextResultOrThrow();
        return response;
    }
}
public class DatabaseSupport {

    /**
     * Gets every instance of the specified historical entity in the database.
     *
     * @param <T> the type of the entity.
     * @param historicalEntityCls the class of the specified historical entity.
     *        The entity must be a subtype of {@link HistoricalEntity}. Cannot be <code>null</code>.
     * @return the instances requested. Guaranteed not <code>null</code>.
     */
    public <T extends HistoricalEntity<?>> List<T> getCurrent(Class<T> historicalEntityCls) {
        EntityManager entityManager = this.entityManagerProvider.get();
        CriteriaBuilder builder = entityManager.getCriteriaBuilder();
        CriteriaQuery<T> criteriaQuery = builder.createQuery(historicalEntityCls);
        Root<T> root = criteriaQuery.from(historicalEntityCls);
        // NOTE(review): the predicate comes from expiredAt(root, builder) although the method is
        // named getCurrent -- confirm the helper actually selects the current (non-expired) rows.
        criteriaQuery.where(expiredAt(root, builder));
        TypedQuery<T> typedQuery = entityManager.createQuery(criteriaQuery);
        return typedQuery.getResultList();
    }
}
public class OmemoService {

    /**
     * Decrypt possible OMEMO encrypted messages in a {@link MamManager.MamQuery}.
     * The returned list contains wrappers that either hold an {@link OmemoMessage} in case the message
     * was decrypted properly, otherwise it contains the message itself.
     *
     * @param managerGuard authenticated OmemoManager.
     * @param mamQuery Mam archive query
     * @return list of {@link MessageOrOmemoMessage}s.
     */
    List<MessageOrOmemoMessage> decryptMamQueryResult(OmemoManager.LoggedInOmemoManager managerGuard,
                    MamManager.MamQuery mamQuery) {
        List<MessageOrOmemoMessage> result = new ArrayList<>();
        for (Message message : mamQuery.getMessages()) {
            if (OmemoManager.stanzaContainsOmemoElement(message)) {
                OmemoElement element =
                        message.getExtension(OmemoElement.NAME_ENCRYPTED, OmemoConstants.OMEMO_NAMESPACE_V_AXOLOTL);
                // Decrypt OMEMO messages
                try {
                    OmemoMessage.Received omemoMessage =
                            decryptMessage(managerGuard, message.getFrom().asBareJid(), element);
                    result.add(new MessageOrOmemoMessage(omemoMessage));
                } catch (NoRawSessionException | CorruptedOmemoKeyException | CryptoFailedException e) {
                    // Decryption failed: log and keep the original stanza so the caller still sees it.
                    LOGGER.log(Level.WARNING, "decryptMamQueryResult failed to decrypt message from "
                            + message.getFrom() + " due to corrupted session/key: " + e.getMessage());
                    result.add(new MessageOrOmemoMessage(message));
                }
            } else {
                // Wrap cleartext messages
                result.add(new MessageOrOmemoMessage(message));
            }
        }
        return result;
    }
}
public class CryptoUtils { /** * TODO : find something better */ public static boolean areEqual ( @ Nullable SHA256Digest first , @ Nullable SHA256Digest second ) { } }
if ( first == null ) { return second == null ; } if ( second == null ) { return false ; } return Arrays . equals ( first . getEncodedState ( ) , second . getEncodedState ( ) ) ;
public class EmulatedFieldsForDumping {

    /**
     * Find and set the Object value of a given field named <code>name</code> in the receiver.
     *
     * @param name A String, the name of the field to set
     * @param value New value for the field.
     */
    @Override
    public void put(String name, Object value) {
        // Delegate straight to the wrapped emulated-fields container.
        emulatedFields.put(name, value);
    }
}
public class SDKUtil {

    /**
     * Signs the request data, choosing one of three signing methods based on the
     * {@code signMethod} value (RSA, SHA-256 or SM3), with algorithm selection also
     * depending on the protocol {@code version} field.
     *
     * @param data the data to sign, as a map of key/value pairs; the signature is written back
     *        into the map under the signature key
     * @param encoding character encoding to use; defaults to UTF-8 when empty
     * @return whether signing succeeded
     */
    public static boolean sign(Map<String, String> data, String encoding) {
        if (isEmpty(encoding)) {
            encoding = "UTF-8";
        }
        String signMethod = data.get(param_signMethod);
        String version = data.get(SDKConstants.param_version);
        // For protocol versions other than 1.0.0 / 5.0.1 the signMethod field is mandatory.
        if (!VERSION_1_0_0.equals(version) && !VERSION_5_0_1.equals(version) && isEmpty(signMethod)) {
            LogUtil.writeErrorLog("signMethod must Not null");
            return false;
        }
        if (isEmpty(version)) {
            LogUtil.writeErrorLog("version must Not null");
            return false;
        }
        if (SIGNMETHOD_RSA.equals(signMethod) || VERSION_1_0_0.equals(version) || VERSION_5_0_1.equals(version)) {
            if (VERSION_5_0_0.equals(version) || VERSION_1_0_0.equals(version) || VERSION_5_0_1.equals(version)) {
                // Set the signing certificate serial number.
                data.put(SDKConstants.param_certId, CertUtil.getSignCertId());
                // Convert the map into "key1=value1&key2=value2" form.
                String stringData = coverMap2String(data);
                LogUtil.writeLog("待签名请求报文串:[" + stringData + "]");
                byte[] byteSign = null;
                String stringSign = null;
                try {
                    // Digest with SHA-1 and convert to a hex string, then RSA-sign and base64-encode.
                    byte[] signDigest = SecureUtil.sha1X16(stringData, encoding);
                    byteSign = SecureUtil.base64Encode(SecureUtil.signBySoft(CertUtil.getSignCertPrivateKey(), signDigest));
                    stringSign = new String(byteSign);
                    // Store the signature field back into the request map.
                    data.put(SDKConstants.param_signature, stringSign);
                    return true;
                } catch (Exception e) {
                    LogUtil.writeErrorLog("Sign Error", e);
                    return false;
                }
            } else if (VERSION_5_1_0.equals(version)) {
                // Set the signing certificate serial number.
                data.put(SDKConstants.param_certId, CertUtil.getSignCertId());
                // Convert the map into "key1=value1&key2=value2" form.
                String stringData = coverMap2String(data);
                LogUtil.writeLog("待签名请求报文串:[" + stringData + "]");
                byte[] byteSign = null;
                String stringSign = null;
                try {
                    // Digest with SHA-256 and convert to a hex string, then RSA-sign and base64-encode.
                    byte[] signDigest = SecureUtil.sha256X16(stringData, encoding);
                    byteSign = SecureUtil.base64Encode(SecureUtil.signBySoft256(CertUtil.getSignCertPrivateKey(), signDigest));
                    stringSign = new String(byteSign);
                    // Store the signature field back into the request map.
                    data.put(SDKConstants.param_signature, stringSign);
                    return true;
                } catch (Exception e) {
                    LogUtil.writeErrorLog("Sign Error", e);
                    return false;
                }
            }
        } else if (SIGNMETHOD_SHA256.equals(signMethod)) {
            return signBySecureKey(data, SDKConfig.getConfig().getSecureKey(), encoding);
        } else if (SIGNMETHOD_SM3.equals(signMethod)) {
            return signBySecureKey(data, SDKConfig.getConfig().getSecureKey(), encoding);
        }
        return false;
    }
}
public class RRBudgetV1_0Generator {

    /**
     * Gets ParticipantTraineeSupportCosts details in BudgetYearDataType such as
     * TuitionFeeHealthInsurance, Stipends, Subsistence, Travel, Other,
     * ParticipantTraineeNumber and TotalCost based on the BudgetPeriodInfo for the RRBudget.
     *
     * @param periodInfo (BudgetPeriodInfo) budget period entry.
     * @return ParticipantTraineeSupportCosts corresponding to the BudgetPeriodInfo object.
     */
    private ParticipantTraineeSupportCosts getParticipantTraineeSupportCosts(BudgetPeriodDto periodInfo) {
        ParticipantTraineeSupportCosts traineeSupportCosts = ParticipantTraineeSupportCosts.Factory.newInstance();
        if (periodInfo != null) {
            // NOTE(review): the individual cost getters are dereferenced without null checks --
            // confirm the DTO guarantees non-null amounts for these fields.
            traineeSupportCosts.setTuitionFeeHealthInsurance(periodInfo.getPartTuition().bigDecimalValue());
            traineeSupportCosts.setStipends(periodInfo.getpartStipendCost().bigDecimalValue());
            traineeSupportCosts.setTravel(periodInfo.getpartTravelCost().bigDecimalValue());
            traineeSupportCosts.setSubsistence(periodInfo.getPartSubsistence().bigDecimalValue());
            traineeSupportCosts.setOther(getOtherPTSupportCosts(periodInfo));
            traineeSupportCosts.setParticipantTraineeNumber(periodInfo.getparticipantCount());
            // Total = tuition + (stipends + (travel + (subsistence + other))).
            traineeSupportCosts.setTotalCost(traineeSupportCosts.getTuitionFeeHealthInsurance().add(
                traineeSupportCosts.getStipends().add(traineeSupportCosts.getTravel().add(
                    traineeSupportCosts.getSubsistence().add(traineeSupportCosts.getOther().getCost())))));
        }
        return traineeSupportCosts;
    }
}
public class Benchmark {

    /**
     * Getting all benchmarkable methods out of the registered classes.
     *
     * @return a list with {@link BenchmarkMethod}
     * @throws PerfidixMethodCheckException if checking a method fails
     */
    List<BenchmarkMethod> getBenchmarkMethods() throws PerfidixMethodCheckException {
        // Collect every benchmarkable method declared by the registered classes.
        final List<BenchmarkMethod> found = new ArrayList<BenchmarkMethod>();
        for (final Class<?> clazz : clazzes) {
            for (final Method candidate : clazz.getDeclaredMethods()) {
                if (!BenchmarkMethod.isBenchmarkable(candidate)) {
                    continue;
                }
                found.add(new BenchmarkMethod(candidate));
            }
        }
        return found;
    }
}
public class UploadCallable {

    /**
     * Initiates a multipart upload and returns the upload id.
     *
     * @param origReq the original put request whose settings are copied onto the initiate request
     * @param isUsingEncryption whether client-side encryption is in use
     * @return the upload id of the newly initiated multipart upload
     */
    private String initiateMultipartUpload(PutObjectRequest origReq, boolean isUsingEncryption) {
        InitiateMultipartUploadRequest req = null;
        if (isUsingEncryption && origReq instanceof EncryptedPutObjectRequest) {
            // Encrypted uploads also carry the materials description into the initiate request.
            req = new EncryptedInitiateMultipartUploadRequest(origReq.getBucketName(), origReq.getKey())
                    .withCannedACL(origReq.getCannedAcl()).withObjectMetadata(origReq.getMetadata());
            ((EncryptedInitiateMultipartUploadRequest) req)
                    .setMaterialsDescription(((EncryptedPutObjectRequest) origReq).getMaterialsDescription());
        } else {
            req = new InitiateMultipartUploadRequest(origReq.getBucketName(), origReq.getKey())
                    .withCannedACL(origReq.getCannedAcl()).withObjectMetadata(origReq.getMetadata());
        }
        TransferManager.appendMultipartUserAgent(req);
        // Copy the remaining settings from the original request onto the initiate request.
        req.withAccessControlList(origReq.getAccessControlList())
                .withRequesterPays(origReq.isRequesterPays())
                .withStorageClass(origReq.getStorageClass())
                .withRedirectLocation(origReq.getRedirectLocation())
                .withSSECustomerKey(origReq.getSSECustomerKey())
                .withSSEAwsKeyManagementParams(origReq.getSSEAwsKeyManagementParams())
                .withGeneralProgressListener(origReq.getGeneralProgressListener())
                .withRequestMetricCollector(origReq.getRequestMetricCollector());
        String uploadId = s3.initiateMultipartUpload(req).getUploadId();
        log.debug("Initiated new multipart upload: " + uploadId);
        return uploadId;
    }
}
public class CxDxServerSessionImpl {

    /*
     * (non-Javadoc)
     * @see org.jdiameter.api.cxdx.ServerCxDxSession#sendUserAuthorizationAnswer(org.jdiameter.api.cxdx.events.JUserAuthorizationAnswer)
     */
    @Override
    public void sendUserAuthorizationAnswer(JUserAuthorizationAnswer answer)
            throws InternalException, IllegalDiameterStateException, RouteException, OverloadException {
        // Dispatch the answer through the session's generic SEND_MESSAGE event handler.
        send(Event.Type.SEND_MESSAGE, null, answer);
    }
}
public class ListGraphqlApisResult { /** * The < code > GraphqlApi < / code > objects . * @ param graphqlApis * The < code > GraphqlApi < / code > objects . */ public void setGraphqlApis ( java . util . Collection < GraphqlApi > graphqlApis ) { } }
if ( graphqlApis == null ) { this . graphqlApis = null ; return ; } this . graphqlApis = new java . util . ArrayList < GraphqlApi > ( graphqlApis ) ;
public class EnvLoader {

    /**
     * Initializes the environment. Idempotent: the first call flips {@code _isStaticInit}
     * and performs the setup; later calls return immediately. Runs with the system class
     * loader installed as the thread context loader and restores the previous loader in
     * the finally block, marking {@code _isInitComplete} either way.
     */
    public static synchronized void initializeEnvironment() {
        if (_isStaticInit)
            return;
        _isStaticInit = true;
        ClassLoader systemLoader = ClassLoader.getSystemClassLoader();
        Thread thread = Thread.currentThread();
        ClassLoader oldLoader = thread.getContextClassLoader();
        try {
            thread.setContextClassLoader(systemLoader);
            // Require JDK 1.8 or later (lexicographic check against java.runtime.version). #2281
            if ("1.8.".compareTo(System.getProperty("java.runtime.version")) > 0)
                throw new ConfigException("Baratine requires JDK 1.8 or later");
            // NOTE(review): 'props' and 'envClassLoader' are unused leftovers of a large block of
            // commented-out JNDI/JMX bootstrap code (naming-factory and MBean-server setup) that
            // was condensed away here for readability; see version control history for the text.
            Properties props = System.getProperties();
            ClassLoader envClassLoader = EnvironmentClassLoader.class.getClassLoader();
        } catch (Throwable e) {
            // Best-effort initialization: log at FINE and continue.
            log().log(Level.FINE, e.toString(), e);
        } finally {
            thread.setContextClassLoader(oldLoader);
            _isInitComplete = true;
        }
    }
}
public class RESTAuthHelper { /** * Determines whether the given { @ code principal } has specified { @ code permission } on the given { @ code resource } . * @ param authHeader contents of an HTTP Authorization header * @ param resource representation of the resource being accessed * @ param principal the identity of the subject accessing the resource * @ param permission the permission * @ return { @ code true } if either auth is disabled or authorization is granted , and { @ code false } * if auth is enabled and authorization is not granted * @ throws AuthException if either authentication or authorization fails */ public boolean isAuthorized ( List < String > authHeader , String resource , Principal principal , AuthHandler . Permissions permission ) throws AuthException { } }
if ( isAuthEnabled ( ) ) { return pravegaAuthManager . authorize ( resource , principal , parseCredentials ( authHeader ) , permission ) ; } else { // Since auth is disabled , every request is deemed to have been authorized . return true ; }
public class PlayEngine { /** * Send VOD seek control message * @ param msgIn * Message input * @ param position * Playlist item * @ return Out - of - band control message call result or - 1 on failure */ private int sendVODSeekCM ( int position ) { } }
OOBControlMessage oobCtrlMsg = new OOBControlMessage ( ) ; oobCtrlMsg . setTarget ( ISeekableProvider . KEY ) ; oobCtrlMsg . setServiceName ( "seek" ) ; Map < String , Object > paramMap = new HashMap < String , Object > ( 1 ) ; paramMap . put ( "position" , position ) ; oobCtrlMsg . setServiceParamMap ( paramMap ) ; msgInReference . get ( ) . sendOOBControlMessage ( this , oobCtrlMsg ) ; if ( oobCtrlMsg . getResult ( ) instanceof Integer ) { return ( Integer ) oobCtrlMsg . getResult ( ) ; } else { return - 1 ; }
public class HornSchunckPyramid {

    /**
     * Warps the input image using the given flow field: each output pixel (x, y) is sampled
     * from the input at (x + flowX, y + flowY) using the class's interpolator. Pixels whose
     * warped coordinates fall outside the input image are set to zero.
     */
    protected void warpImageTaylor(GrayF32 before, GrayF32 flowX, GrayF32 flowY, GrayF32 after) {
        interp.setImage(before);
        for (int y = 0; y < before.height; y++) {
            int pixelIndex = y * before.width;
            for (int x = 0; x < before.width; x++, pixelIndex++) {
                float u = flowX.data[pixelIndex];
                float v = flowY.data[pixelIndex];
                float wx = x + u;
                float wy = y + v;
                if (wx < 0 || wx > before.width - 1 || wy < 0 || wy > before.height - 1) {
                    // setting outside pixels to zero seems to produce smoother results than extending the image
                    after.data[pixelIndex] = 0;
                } else {
                    after.data[pixelIndex] = interp.get(wx, wy);
                }
            }
        }
    }
}
public class ApiOvhMe { /** * Get associated payment method transaction * REST : GET / me / payment / transaction / { transactionId } * @ param transactionId [ required ] Payment method transaction ID * API beta */ public OvhTransaction payment_transaction_transactionId_GET ( Long transactionId ) throws IOException { } }
String qPath = "/me/payment/transaction/{transactionId}" ; StringBuilder sb = path ( qPath , transactionId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhTransaction . class ) ;
public class DatastoreStorage {

    /**
     * Delete resource by id. Deletes both counter shards and counter limit if it exists.
     * <p>Due to Datastore limitations (modify max 25 entity groups per transaction),
     * we cannot do everything in one transaction.
     */
    void deleteResource(String id) throws IOException {
        // Delete the limit entity (with retries) first, then the shards in a separate step.
        storeWithRetries(() -> {
            datastore.delete(datastore.newKeyFactory().setKind(KIND_COUNTER_LIMIT).newKey(id));
            return null;
        });
        deleteShardsForCounter(id);
    }
}
public class DailyVolumeMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param dailyVolume the object to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the object's fields
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(DailyVolume dailyVolume, ProtocolMarshaller protocolMarshaller) {
        if (dailyVolume == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(dailyVolume.getStartDate(), STARTDATE_BINDING);
            protocolMarshaller.marshall(dailyVolume.getVolumeStatistics(), VOLUMESTATISTICS_BINDING);
            protocolMarshaller.marshall(dailyVolume.getDomainIspPlacements(), DOMAINISPPLACEMENTS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CmsProjectDriver {

    /**
     * Publishes a new file.<p>
     *
     * @param dbc the current database context
     * @param onlineProject the online project
     * @param offlineResource the resource to publish
     * @param publishedContentIds contains the UUIDs of already published content records
     * @param publishHistoryId the publish history id
     * @param publishTag the publish tag
     * @throws CmsDataAccessException if something goes wrong
     */
    protected void publishNewFile(
        CmsDbContext dbc,
        CmsProject onlineProject,
        CmsResource offlineResource,
        Set<CmsUUID> publishedContentIds,
        CmsUUID publishHistoryId,
        int publishTag) throws CmsDataAccessException {

        CmsResourceState resourceState = fixMovedResource(dbc, onlineProject, offlineResource, publishHistoryId, publishTag);
        CmsFile newFile;
        try {
            // reset the labeled link flag before writing the online file
            int flags = offlineResource.getFlags();
            flags &= ~CmsResource.FLAG_LABELED;
            offlineResource.setFlags(flags);
            // publish the file content
            newFile = m_driverManager.getProjectDriver(dbc).publishFileContent(
                dbc,
                dbc.currentProject(),
                onlineProject,
                offlineResource,
                publishedContentIds,
                true,
                publishTag);
        } catch (CmsVfsResourceAlreadyExistsException e) {
            try {
                // remove the existing file and ensure that its content is written
                // in any case by removing its resource ID from the set of published resource IDs
                m_driverManager.getVfsDriver(dbc).removeFile(dbc, onlineProject.getUuid(), offlineResource);
                publishedContentIds.remove(offlineResource.getResourceId());
                newFile = m_driverManager.getProjectDriver(dbc).publishFileContent(
                    dbc,
                    dbc.currentProject(),
                    onlineProject,
                    offlineResource,
                    publishedContentIds,
                    true,
                    publishTag);
            } catch (CmsDataAccessException e1) {
                if (LOG.isErrorEnabled()) {
                    LOG.error(Messages.get().getBundle().key(Messages.LOG_PUBLISHING_RESOURCE_1, offlineResource.getRootPath()), e);
                }
                throw e1;
            }
        } catch (CmsDataAccessException e) {
            if (LOG.isErrorEnabled()) {
                LOG.error(Messages.get().getBundle().key(Messages.LOG_PUBLISHING_RESOURCE_1, offlineResource.getRootPath()), e);
            }
            throw e;
        }
        List<CmsProperty> offlineProperties;
        try {
            // write the properties online
            offlineProperties = m_driverManager.getVfsDriver(dbc).readPropertyObjects(dbc, dbc.currentProject(), offlineResource);
            CmsProperty.setAutoCreatePropertyDefinitions(offlineProperties, true);
            m_driverManager.getVfsDriver(dbc).writePropertyObjects(dbc, onlineProject, newFile, offlineProperties);
        } catch (CmsDataAccessException e) {
            if (LOG.isErrorEnabled()) {
                LOG.error(Messages.get().getBundle().key(Messages.LOG_PUBLISHING_PROPERTIES_1, newFile.getRootPath()), e);
            }
            throw e;
        }
        try {
            // write the ACL online
            m_driverManager.getUserDriver(dbc).publishAccessControlEntries(
                dbc,
                dbc.currentProject(),
                onlineProject,
                offlineResource.getResourceId(),
                newFile.getResourceId());
        } catch (CmsDataAccessException e) {
            if (LOG.isErrorEnabled()) {
                LOG.error(Messages.get().getBundle().key(Messages.LOG_PUBLISHING_ACL_1, newFile.getRootPath()), e);
            }
            throw e;
        }
        // write the history entry and update the resource relations
        CmsFile offlineFile = new CmsFile(offlineResource);
        offlineFile.setContents(newFile.getContents());
        internalWriteHistory(dbc, offlineFile, resourceState, offlineProperties, publishHistoryId, publishTag);
        m_driverManager.getVfsDriver(dbc).updateRelations(dbc, onlineProject, offlineResource);
    }
}
public class Hashids {

    /**
     * Encodes the given numbers into a hash string using the instance's salt, alphabet,
     * separators and guards, padding the result up to {@code minHashLength}.
     *
     * @param numbers the numbers to encode
     * @return the encoded hash string
     */
    private String _encode(long... numbers) {
        // Seed derived from all inputs; selects the lottery character and the guard characters.
        long numberHashInt = 0;
        for (int i = 0; i < numbers.length; i++) {
            numberHashInt += (numbers[i] % (i + 100));
        }
        String alphabet = this.alphabet;
        final char ret = alphabet.charAt((int) (numberHashInt % alphabet.length()));
        long num;
        long sepsIndex, guardIndex;
        String buffer;
        final StringBuilder ret_strB = new StringBuilder(this.minHashLength);
        ret_strB.append(ret);
        char guard;
        for (int i = 0; i < numbers.length; i++) {
            num = numbers[i];
            // Re-shuffle the alphabet for every number so equal values encode differently by position.
            buffer = ret + this.salt + alphabet;
            alphabet = Hashids.consistentShuffle(alphabet, buffer.substring(0, alphabet.length()));
            final String last = Hashids.hash(num, alphabet);
            ret_strB.append(last);
            if (i + 1 < numbers.length) {
                // Insert a separator character between encoded numbers.
                if (last.length() > 0) {
                    num %= (last.charAt(0) + i);
                    sepsIndex = (int) (num % this.seps.length());
                } else {
                    sepsIndex = 0;
                }
                ret_strB.append(this.seps.charAt((int) sepsIndex));
            }
        }
        String ret_str = ret_strB.toString();
        if (ret_str.length() < this.minHashLength) {
            // Pad with guard characters: one at the front, then one at the back if still short.
            guardIndex = (numberHashInt + (ret_str.charAt(0))) % this.guards.length();
            guard = this.guards.charAt((int) guardIndex);
            ret_str = guard + ret_str;
            if (ret_str.length() < this.minHashLength) {
                guardIndex = (numberHashInt + (ret_str.charAt(2))) % this.guards.length();
                guard = this.guards.charAt((int) guardIndex);
                ret_str += guard;
            }
        }
        // Keep growing with shuffled alphabet halves until the minimum length is reached,
        // trimming symmetrically when the padded string overshoots.
        final int halfLen = alphabet.length() / 2;
        while (ret_str.length() < this.minHashLength) {
            alphabet = Hashids.consistentShuffle(alphabet, alphabet);
            ret_str = alphabet.substring(halfLen) + ret_str + alphabet.substring(0, halfLen);
            final int excess = ret_str.length() - this.minHashLength;
            if (excess > 0) {
                final int start_pos = excess / 2;
                ret_str = ret_str.substring(start_pos, start_pos + this.minHashLength);
            }
        }
        return ret_str;
    }
}
public class Maze2D {

    /**
     * Fill a rectangle defined by points a and b with empty tiles.
     *
     * @param a point a of the rectangle.
     * @param b point b of the rectangle.
     */
    public void removeObstacleRectangle(Point a, Point b) {
        // Clearing obstacles is just painting the rectangle with the EMPTY symbol.
        updateRectangle(a, b, Symbol.EMPTY);
    }
}
public class XmpReader {

    /**
     * Performs the XMP data extraction, adding found values to the specified instance of {@link Metadata}.
     * The extraction is done with Adobe's XMPCore library.
     */
    public void extract(@NotNull final byte[] xmpBytes, @NotNull Metadata metadata, @Nullable Directory parentDirectory) {
        // Delegate to the offset/length overload, covering the whole byte array.
        extract(xmpBytes, 0, xmpBytes.length, metadata, parentDirectory);
    }
}
public class CommonOps_DDRM { /** * Performs the following operation : < br > * < br > * c = c + a < sup > T < / sup > * b < br > * c < sub > ij < / sub > = c < sub > ij < / sub > + & sum ; < sub > k = 1 : n < / sub > { a < sub > ki < / sub > * b < sub > kj < / sub > } * @ param a The left matrix in the multiplication operation . Not modified . * @ param b The right matrix in the multiplication operation . Not modified . * @ param c Where the results of the operation are stored . Modified . */ public static void multAddTransA ( DMatrix1Row a , DMatrix1Row b , DMatrix1Row c ) { } }
if ( b . numCols == 1 ) { if ( a . numCols >= EjmlParameters . MULT_COLUMN_SWITCH ) { MatrixVectorMult_DDRM . multAddTransA_reorder ( a , b , c ) ; } else { MatrixVectorMult_DDRM . multAddTransA_small ( a , b , c ) ; } } else { if ( a . numCols >= EjmlParameters . MULT_COLUMN_SWITCH || b . numCols >= EjmlParameters . MULT_COLUMN_SWITCH ) { MatrixMatrixMult_DDRM . multAddTransA_reorder ( a , b , c ) ; } else { MatrixMatrixMult_DDRM . multAddTransA_small ( a , b , c ) ; } }
public class DescribeTagsRequest { /** * You can filter the list using a < i > key < / i > - < i > value < / i > format . You can separate these items by using logical * operators . Allowed filters include < code > tagKey < / code > , < code > tagValue < / code > , and < code > configurationId < / code > . * @ param filters * You can filter the list using a < i > key < / i > - < i > value < / i > format . You can separate these items by using * logical operators . Allowed filters include < code > tagKey < / code > , < code > tagValue < / code > , and * < code > configurationId < / code > . */ public void setFilters ( java . util . Collection < TagFilter > filters ) { } }
if ( filters == null ) { this . filters = null ; return ; } this . filters = new java . util . ArrayList < TagFilter > ( filters ) ;
public class ClassInfo {

    /**
     * Add this field in the Record's field sequence.
     * Creates the field object for the given sequence position; positions not handled
     * here (including 0-2, whose setup code is commented out below) fall through to
     * {@code super.setupField(iFieldSeq)}.
     *
     * @param iFieldSeq the position of the field in the record's field sequence
     * @return the field created for this position
     */
    public BaseField setupField(int iFieldSeq) {
        BaseField field = null;
        // (Sequences 0-2 -- hidden counter / record-changed / deleted fields -- were once
        //  created here; that commented-out setup has been condensed to this note.)
        if (iFieldSeq == 3) {
            field = new StringField(this, CLASS_NAME, 40, null, null);
            field.setNullable(false);
        }
        if (iFieldSeq == 4)
            field = new StringField(this, BASE_CLASS_NAME, 40, null, null);
        if (iFieldSeq == 5)
            field = new StringField(this, CLASS_DESC, 255, null, null);
        if (iFieldSeq == 6) {
            field = new ClassProjectField(this, CLASS_PROJECT_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
            field.addListener(new InitOnceFieldHandler(null));
        }
        if (iFieldSeq == 7) {
            field = new StringField(this, CLASS_PACKAGE, 60, null, null);
            field.addListener(new InitOnceFieldHandler(null));
        }
        if (iFieldSeq == 8)
            field = new StringField(this, CLASS_SOURCE_FILE, 40, null, null);
        if (iFieldSeq == 9) {
            field = new ClassTypeField(this, CLASS_TYPE, 20, null, null);
            field.addListener(new InitOnceFieldHandler(null));
        }
        if (iFieldSeq == 10)
            field = new XmlField(this, CLASS_EXPLAIN, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 11)
            field = new XmlField(this, CLASS_HELP, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 12)
            field = new StringField(this, CLASS_IMPLEMENTS, 60, null, null);
        if (iFieldSeq == 13)
            field = new MemoField(this, SEE_ALSO, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 14)
            field = new MemoField(this, TECHNICAL_INFO, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 15)
            field = new StringField(this, COPY_DESC_FROM, 50, null, null);
        if (field == null)
            field = super.setupField(iFieldSeq);
        return field;
    }
}
public class TIFFImageWriter {

    /**
     * Copies the relevant settings of the given write param onto a fresh default param
     * obtained from the target writer.
     * TODO: Candidate util method
     *
     * @param param the source param, may be null
     * @param writer the writer whose default param serves as the copy target
     * @return a new param carrying the source's subsampling, region, band and compression
     *         settings, or null when {@code param} is null
     */
    private ImageWriteParam copyParams(final ImageWriteParam param, final ImageWriter writer) {
        if (param == null) {
            return null;
        }
        // Always safe
        ImageWriteParam writeParam = writer.getDefaultWriteParam();
        writeParam.setSourceSubsampling(param.getSourceXSubsampling(), param.getSourceYSubsampling(),
                param.getSubsamplingXOffset(), param.getSubsamplingYOffset());
        writeParam.setSourceRegion(param.getSourceRegion());
        writeParam.setSourceBands(param.getSourceBands());
        // Only if canWriteCompressed()
        // NOTE(review): setCompressionMode is invoked unconditionally -- confirm the target
        // writer always supports compression, otherwise this can throw UnsupportedOperationException.
        writeParam.setCompressionMode(param.getCompressionMode());
        if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) {
            writeParam.setCompressionQuality(param.getCompressionQuality());
        }
        return writeParam;
    }
}
public class DefaultPropertiesLoader {

    /**
     * This method loads default System Properties if:
     * - the context is not an integration-test, and
     * - the property is not already set
     * This method mutates explicitly specified system properties only if:
     * - they represent relative paths, in order to make them relative to the
     *   explicit or implicit home directory
     */
    public void loadSystemProperties() {
        LOGGER.info("Loading properties");
        if (getProperty("integration-test") == null) {
            LOGGER.trace("Setting default properties, if necessary.");
            // Base directory: fcrepo.home when set, otherwise <user.dir>/fcrepo4-data.
            final String fcrepoHome = getProperty("fcrepo.home");
            final String baseDir = (fcrepoHome == null
                    ? getProperty("user.dir") + SEP + "fcrepo4-data" + SEP
                    : fcrepoHome + SEP);
            for (final PROPERTIES prop : PROPERTIES.values()) {
                final String value = getProperty(prop.getValue());
                if (value == null) {
                    if (prop.getSetDefaultValue()) {
                        setProperty(prop.getValue(), baseDir);
                    }
                } else {
                    // Re-anchor already-set relative paths under the base directory.
                    updateRelativePropertyPath(prop.getValue(), value, baseDir);
                }
            }
        }
        // Log the effective value of every known property.
        for (final PROPERTIES prop : PROPERTIES.values()) {
            final String val = prop.getValue();
            LOGGER.info("{} = {}", val, getProperty(val));
        }
    }
}
public class Tracer { /** * Create connection listener * @ param poolName The name of the pool * @ param mcp The managed connection pool * @ param cl The connection listener * @ param mc The managed connection * @ param get A GET operation * @ param prefill A PREFILL operation * @ param incrementer An INCREMENTER operation * @ param callstack The call stack */ public static synchronized void createConnectionListener ( String poolName , Object mcp , Object cl , Object mc , boolean get , boolean prefill , boolean incrementer , Throwable callstack ) { } }
if ( get ) { log . tracef ( "%s" , new TraceEvent ( poolName , Integer . toHexString ( System . identityHashCode ( mcp ) ) , TraceEvent . CREATE_CONNECTION_LISTENER_GET , Integer . toHexString ( System . identityHashCode ( cl ) ) , Integer . toHexString ( System . identityHashCode ( mc ) ) , ! confidential && callstack != null ? toString ( callstack ) : "" ) ) ; } else if ( prefill ) { log . tracef ( "%s" , new TraceEvent ( poolName , Integer . toHexString ( System . identityHashCode ( mcp ) ) , TraceEvent . CREATE_CONNECTION_LISTENER_PREFILL , Integer . toHexString ( System . identityHashCode ( cl ) ) , Integer . toHexString ( System . identityHashCode ( mc ) ) , ! confidential && callstack != null ? toString ( callstack ) : "" ) ) ; } else if ( incrementer ) { log . tracef ( "%s" , new TraceEvent ( poolName , Integer . toHexString ( System . identityHashCode ( mcp ) ) , TraceEvent . CREATE_CONNECTION_LISTENER_INCREMENTER , Integer . toHexString ( System . identityHashCode ( cl ) ) , Integer . toHexString ( System . identityHashCode ( mc ) ) , ! confidential && callstack != null ? toString ( callstack ) : "" ) ) ; }
public class OmemoMessageBuilder {
    /**
     * Encrypt the message with the AES key.
     * Move the AuthTag from the end of the cipherText to the end of the messageKey afterwards.
     * This prevents an attacker who compromised one recipient device from switching out the
     * cipherText for other recipients.
     * @see <a href="https://conversations.im/omemo/audit.pdf">OMEMO security audit</a>.
     * @param message plaintext message; a {@code null} message is a no-op
     * @throws NoSuchPaddingException
     * @throws NoSuchAlgorithmException
     * @throws InvalidAlgorithmParameterException
     * @throws InvalidKeyException
     * @throws UnsupportedEncodingException
     * @throws BadPaddingException
     * @throws IllegalBlockSizeException
     */
    private void setMessage(String message)
            throws NoSuchPaddingException, NoSuchAlgorithmException, InvalidAlgorithmParameterException,
                   InvalidKeyException, UnsupportedEncodingException, BadPaddingException,
                   IllegalBlockSizeException {
        if (message == null) {
            return;
        }

        // Encrypt message body with the symmetric key and IV prepared elsewhere
        // (KEYTYPE/CIPHERMODE are class constants; presumably AES — confirm at declaration).
        SecretKey secretKey = new SecretKeySpec(messageKey, KEYTYPE);
        IvParameterSpec ivSpec = new IvParameterSpec(initializationVector);
        Cipher cipher = Cipher.getInstance(CIPHERMODE);
        cipher.init(Cipher.ENCRYPT_MODE, secretKey, ivSpec);

        byte[] body;
        byte[] ciphertext;
        body = message.getBytes(StringUtils.UTF8);
        ciphertext = cipher.doFinal(body);

        // The cipher appends a 16-byte authentication tag to the ciphertext.
        // Move it onto the key material instead: key grows by 16 bytes, ciphertext
        // shrinks by 16 bytes (see class javadoc for the security rationale).
        byte[] clearKeyWithAuthTag = new byte[messageKey.length + 16];
        byte[] cipherTextWithoutAuthTag = new byte[ciphertext.length - 16];
        moveAuthTag(messageKey, ciphertext, clearKeyWithAuthTag, cipherTextWithoutAuthTag);

        ciphertextMessage = cipherTextWithoutAuthTag;
        messageKey = clearKeyWithAuthTag;
    }
}
public class AuthServerLogic { /** * Saves user credentials */ public Credentials userSignUp ( Credentials credentials ) throws DAOException { } }
if ( getUserByEmail ( dao , credentials . getEmailAddress ( ) ) != null ) { throw new DAOException ( HttpStatus . SC_CONFLICT , "User Already Exists" ) ; } ServerCredentials toSave = new ServerCredentials ( credentials ) ; toSave . decryptPassword ( keyManager . getPrivateKey ( ) ) ; // decrypt the password String de = toSave . getPassword ( ) ; String ha = BCrypt . hashpw ( de , BCrypt . gensalt ( 10 ) ) ; toSave . setOwnerId ( dao . count ( Credentials . class . getName ( ) ) + 1 ) ; toSave . setPassword ( ha ) ; // hash the password for storage toSave . setAuthToken ( AuthTokenUtils . getNewToken ( keyManager . getSymmetricKey ( ) , toSave ) ) ; toSave . setRecoveryToken ( AuthTokenUtils . getNewToken ( keyManager . getSymmetricKey ( ) , toSave ) ) ; dao . save ( toSave ) ; return toSave ;
public class AsyncMutateInBuilder {
    /**
     * Performs the accumulated sub-document {@link Mutation mutation} operations inside a single
     * existing JSON document and watches for the given durability requirement.
     *
     * Mutations are added through builder methods like {@link #arrayInsert(String, Object)} and
     * are applied atomically at the document level: if one mutation fails, none are applied;
     * otherwise the whole operation emits a {@link DocumentFragment} with updated cas (and
     * optionally {@link MutationToken}). Only the mutated fragments travel over the wire.
     *
     * Most notable error conditions:
     * - enclosing document missing: {@link DocumentDoesNotExistException}
     * - enclosing document not JSON: {@link DocumentNotJsonException}
     * - no mutation defined via the builder: {@link IllegalArgumentException}
     * - a mutation spec could not be encoded: {@link TranscodingException}
     * - the multi-mutation failed: {@link MultiMutationException} (inspect it for the
     *   zero-based index and {@link ResponseStatus} of the first failing mutation)
     * - durability requirement unsatisfiable: {@link DurabilityException}
     * - CAS provided but optimistic locking failed: {@link CASMismatchException}
     *
     * Note that the original execute has already happened when durability is being watched;
     * they are two separate tasks internally.
     *
     * @param persistTo the persistence requirement to watch.
     * @return an {@link Observable} of a single {@link DocumentFragment} (if successful)
     *         containing updated cas metadata. Some individual results may also bear a
     *         value, like counter operations.
     */
    public Observable<DocumentFragment<Mutation>> execute(PersistTo persistTo) {
        // Delegate to the full overload with no replication requirement.
        // NOTE(review): the trailing (0, null) presumably selects the default
        // timeout/time unit — confirm against the four-argument overload.
        return execute(persistTo, ReplicateTo.NONE, 0, null);
    }
}
public class AdaptiveTableLayout {
    /**
     * Lays out a row-header view holder: positions its view in the header column,
     * accounting for scroll offset, cell margins, RTL, and drag-and-drop state,
     * and positions the drag shadows around it.
     *
     * @param holder current view holder
     */
    private void refreshHeaderRowViewHolder(ViewHolder holder) {
        // Vertical position: cumulative height of all rows above this one, plus the
        // fixed column-header band at the top.
        int top = mManager.getRowsHeight(0, Math.max(0, holder.getRowIndex()))
                + mManager.getHeaderColumnHeight();
        int left = calculateRowHeadersLeft();
        if (isRTL()) {
            left += mSettings.getCellMargin();
        }

        View view = holder.getItemView();
        // Accumulated cell margins up to this column/row, plus one leading margin.
        int leftMargin = holder.getColumnIndex() * mSettings.getCellMargin() + mSettings.getCellMargin();
        int topMargin = holder.getRowIndex() * mSettings.getCellMargin() + mSettings.getCellMargin();

        // While dragging, center the header on the drag pointer's y offset instead.
        if (holder.isDragging() && mDragAndDropPoints.getOffset().y > 0) {
            top = mState.getScrollY() + mDragAndDropPoints.getOffset().y - view.getHeight() / 2;
            view.bringToFront();
        }

        // Dragging also shows horizontal shadows above and below the dragged row.
        if (holder.isDragging()) {
            View topShadow = mShadowHelper.getTopShadow();
            View bottomShadow = mShadowHelper.getBottomShadow();
            if (topShadow != null) {
                int shadowTop = top - mState.getScrollY();
                // Clamp the shadow so it never overlaps the fixed column header.
                topShadow.layout(0,
                        Math.max(mManager.getHeaderColumnHeight() - mState.getScrollY(),
                                shadowTop - SHADOW_THICK) + topMargin,
                        mSettings.getLayoutWidth(),
                        shadowTop + topMargin);
                topShadow.bringToFront();
            }
            if (bottomShadow != null) {
                int shadowBottom = top - mState.getScrollY() + mManager.getRowHeight(holder.getRowIndex());
                bottomShadow.layout(0,
                        Math.max(mManager.getHeaderColumnHeight() - mState.getScrollY(),
                                shadowBottom) + topMargin,
                        mSettings.getLayoutWidth(),
                        shadowBottom + SHADOW_THICK + topMargin);
                bottomShadow.bringToFront();
            }
        }

        // In RTL the left margin shifts to the right edge instead.
        //noinspection ResourceType
        view.layout(left + leftMargin * (isRTL() ? 0 : 1),
                top - mState.getScrollY() + topMargin,
                left + mManager.getHeaderRowWidth() + leftMargin * (isRTL() ? 1 : 0),
                top + mManager.getRowHeight(holder.getRowIndex()) - mState.getScrollY() + topMargin);

        if (mState.isColumnDragging()) {
            view.bringToFront();
        }

        // When no row is being dragged, show the vertical shadow along the row-header edge.
        if (!mState.isRowDragging()) {
            View shadow = mShadowHelper.getRowsHeadersShadow();
            if (shadow == null) {
                shadow = mShadowHelper.addRowsHeadersShadow(this);
            }
            int shadowStart, shadowEnd;
            // The shadow sits on the inner edge of the header: right edge in LTR, left in RTL.
            shadowStart = !isRTL() ? view.getRight() : view.getLeft() - SHADOW_HEADERS_THICK;
            shadowEnd = shadowStart + SHADOW_HEADERS_THICK;
            shadow.layout(shadowStart,
                    mState.isColumnDragging() ? 0 : mSettings.isHeaderFixed() ? 0 : -mState.getScrollY(),
                    shadowEnd,
                    mSettings.getLayoutHeight());
            shadow.bringToFront();
        }
    }
}
public class CSVConfig { /** * If the ' tls ' option is true , return the TLS parameters packaged in an * { @ link SSLTransportParameters } object . */ public SSLTransportParameters getTLSParams ( ) { } }
if ( tls && sslParams == null ) { sslParams = new SSLTransportParameters ( ) ; sslParams . setKeyStore ( keystore , keystorepassword ) ; sslParams . setTrustStore ( truststore , truststorepassword ) ; } return sslParams ;
public class SavedScriptRunner { /** * Run a script with parameters . * @ param scriptName name of the script to run * @ param parameters parameters for the script * @ return ScriptResult * @ throws UnknownScriptException if scriptName is unknown * @ throws GenerateScriptException , if parameter is missing */ public ScriptResult runScript ( String scriptName , Map < String , Object > parameters ) { } }
Script script = dataService . query ( SCRIPT , Script . class ) . eq ( ScriptMetadata . NAME , scriptName ) . findOne ( ) ; if ( script == null ) { throw new UnknownEntityException ( SCRIPT , scriptName ) ; } if ( script . getParameters ( ) != null ) { for ( ScriptParameter param : script . getParameters ( ) ) { if ( ! parameters . containsKey ( param . getName ( ) ) ) { throw new GenerateScriptException ( "Missing parameter [" + param + "]" ) ; } } } Map < String , Object > scriptParameters = new HashMap < > ( parameters ) ; if ( script . isGenerateToken ( ) ) { String token = tokenService . generateAndStoreToken ( SecurityUtils . getCurrentUsername ( ) , "For script " + script . getName ( ) ) ; scriptParameters . put ( "molgenisToken" , token ) ; } ScriptRunner scriptRunner = scriptRunnerFactory . getScriptRunner ( script . getScriptType ( ) . getName ( ) ) ; FileMeta fileMeta = null ; if ( scriptRunner . hasFileOutput ( script ) ) { String name = generateRandomString ( ) ; String resultFileExtension = script . getResultFileExtension ( ) ; if ( resultFileExtension != null ) { name += "." + script . getResultFileExtension ( ) ; } File file = fileStore . getFileUnchecked ( name ) ; scriptParameters . put ( "outputFile" , file . getAbsolutePath ( ) ) ; fileMeta = createFileMeta ( name , file ) ; dataService . add ( FILE_META , fileMeta ) ; } String output = scriptRunner . runScript ( script , scriptParameters ) ; return new ScriptResult ( fileMeta , output ) ;
public class Model { /** * Sets attribute value as < code > String < / code > . * If there is a { @ link Converter } registered for the attribute that converts from Class < code > S < / code > to Class * < code > java . lang . String < / code > , given the value is an instance of < code > S < / code > , then it will be used , * otherwise performs a conversion using { @ link Convert # toString ( Object ) } . * @ param attributeName name of attribute . * @ param value value * @ return reference to this model . */ public < T extends Model > T setString ( String attributeName , Object value ) { } }
Converter < Object , String > converter = modelRegistryLocal . converterForValue ( attributeName , value , String . class ) ; return setRaw ( attributeName , converter != null ? converter . convert ( value ) : Convert . toString ( value ) ) ;
public class ApplicationTracker { /** * Returns true if the application with the specified name is started , otherwise false . * @ return true if the application with the specified name is started , otherwise false . */ boolean isStarted ( String appName ) { } }
lock . readLock ( ) . lock ( ) ; try { return appStates . get ( appName ) == ApplicationState . STARTED ; } finally { lock . readLock ( ) . unlock ( ) ; }
public class MessageRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param messageRequest the request to marshal; must not be {@code null}
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if the argument is {@code null} or marshalling fails
     */
    public void marshall(MessageRequest messageRequest, ProtocolMarshaller protocolMarshaller) {
        if (messageRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field under its pre-computed marshalling binding.
            protocolMarshaller.marshall(messageRequest.getAddresses(), ADDRESSES_BINDING);
            protocolMarshaller.marshall(messageRequest.getContext(), CONTEXT_BINDING);
            protocolMarshaller.marshall(messageRequest.getEndpoints(), ENDPOINTS_BINDING);
            protocolMarshaller.marshall(messageRequest.getMessageConfiguration(), MESSAGECONFIGURATION_BINDING);
            protocolMarshaller.marshall(messageRequest.getTraceId(), TRACEID_BINDING);
        } catch (Exception e) {
            // Generated-code convention: wrap any failure in an SdkClientException,
            // preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StringUtils { /** * Extract the filename from the given Java resource path , e . g . * { @ code " mypath / myfile . txt " - > " myfile . txt " } . * @ param path the file path ( may be { @ code null } ) * @ return the extracted filename , or { @ code null } if none */ @ Nullable public static String getFilename ( @ Nullable final String path ) { } }
if ( path == null ) { return null ; } final int separatorIndex = path . lastIndexOf ( StringUtils . FOLDER_SEPARATOR ) ; return separatorIndex != - 1 ? path . substring ( separatorIndex + 1 ) : path ;
public class LwjgFontFactory { /** * TODO リファクタ */ private void extractCharacterFilesFromDir ( URL urlCharacters ) throws URISyntaxException , IOException { } }
File dir = new File ( urlCharacters . toURI ( ) ) ; String pathMask = urlCharacters . getPath ( ) ; ResourceExtractor resourceExtractor = new ResourceExtractor ( ) ; // Windows 環境では絶対パスの先頭に / がつかないので 、 取り除く if ( ( File . separator . equals ( "\\" ) ) && ( pathMask . startsWith ( "/" ) ) ) { pathMask = pathMask . substring ( 1 ) ; } for ( File nextFile : dir . listFiles ( ) ) { String filePath = nextFile . getPath ( ) ; filePath = filePath . substring ( pathMask . length ( ) ) ; filePath = "characters/" + filePath ; resourceExtractor . addResourcePath ( filePath , nextFile . getName ( ) ) ; } resourceExtractor . setResourcesDir ( "characters" ) ; resourceExtractor . copy ( ) ;
public class AWSDatabaseMigrationServiceClient {
    /**
     * Returns information about replication tasks for your account in the current region.
     *
     * @param request the DescribeReplicationTasks request
     * @return Result of the DescribeReplicationTasks operation returned by the service.
     * @throws ResourceNotFoundException The resource could not be found.
     * @sample AWSDatabaseMigrationService.DescribeReplicationTasks
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dms-2016-01-01/DescribeReplicationTasks"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public DescribeReplicationTasksResult describeReplicationTasks(DescribeReplicationTasksRequest request) {
        // Run the pre-execution handler chain (may replace the request), then dispatch.
        request = beforeClientExecution(request);
        return executeDescribeReplicationTasks(request);
    }
}
public class SequentialKnowledgeHelper {
    /**
     * Resolves the runtime value bound to the given declaration for the current tuple.
     *
     * @param declaration the rule declaration to resolve
     * @return the extracted value
     */
    public Object get(final Declaration declaration) {
        // Look up the fact this declaration is bound to in the current tuple, then
        // extract the declared value from it via the working memory.
        return declaration.getValue(workingMemory, this.tuple.getObject(declaration));
    }
}
public class UpdateTagSupport {
    /**
     * Obtains the JDBC connection for this tag: the enclosing transaction's shared
     * connection when nested inside a transaction tag, otherwise a fresh connection
     * from the resolved DataSource. Sets {@code isPartOfTransaction} accordingly.
     *
     * @return the connection to use (caller is responsible for closing it unless it
     *         belongs to an enclosing transaction)
     * @throws JspException if the DataSource is misconfigured or unreachable
     * @throws SQLException declared for callers; connection errors are wrapped in JspException
     */
    private Connection getConnection() throws JspException, SQLException {
        // Fix: Add all other mechanisms
        Connection conn = null;
        isPartOfTransaction = false;

        // Nested inside <sql:transaction>? Then reuse its shared connection; an explicit
        // dataSource attribute is illegal in that case.
        TransactionTagSupport parent =
                (TransactionTagSupport) findAncestorWithClass(this, TransactionTagSupport.class);
        if (parent != null) {
            if (dataSourceSpecified) {
                throw new JspTagException(Resources.getMessage("ERROR_NESTED_DATASOURCE"));
            }
            conn = parent.getSharedConnection();
            isPartOfTransaction = true;
        } else {
            // Standalone: an explicitly specified but null dataSource is an error.
            if ((rawDataSource == null) && dataSourceSpecified) {
                throw new JspException(Resources.getMessage("SQL_DATASOURCE_NULL"));
            }
            DataSource dataSource = DataSourceUtil.getDataSource(rawDataSource, pageContext);
            try {
                conn = dataSource.getConnection();
            } catch (Exception ex) {
                // Wrap any lookup/connect failure in a JspException with the cause text.
                throw new JspException(Resources.getMessage("DATASOURCE_INVALID", ex.toString()));
            }
        }
        return conn;
    }
}
public class JobsInner {
    /**
     * List all directories and files inside the given directory of the Job's output directory
     * (if the output directory is on Azure File Share or Azure Storage Container).
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param workspaceName The name of the workspace (1-64 chars: alphanumeric, dash, underscore).
     * @param experimentName The name of the experiment (1-64 chars: alphanumeric, dash, underscore).
     * @param jobName The name of the job within the specified resource group (1-64 chars:
     *        alphanumeric, dash, underscore).
     * @param jobsListOutputFilesOptions Additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;FileInner&gt; object if successful.
     */
    public PagedList<FileInner> listOutputFiles(final String resourceGroupName, final String workspaceName,
            final String experimentName, final String jobName,
            final JobsListOutputFilesOptions jobsListOutputFilesOptions) {
        // Fetch the first page synchronously.
        ServiceResponse<Page<FileInner>> response =
                listOutputFilesSinglePageAsync(resourceGroupName, workspaceName, experimentName,
                        jobName, jobsListOutputFilesOptions).toBlocking().single();
        // Wrap in a PagedList that lazily (and blockingly) fetches subsequent pages on demand.
        return new PagedList<FileInner>(response.body()) {
            @Override
            public Page<FileInner> nextPage(String nextPageLink) {
                return listOutputFilesNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
}
public class ConnectionManagerImpl {
    /**
     * Release connection to the {@link org.apache.ojb.broker.accesslayer.ConnectionFactory}, make
     * sure that you call the method in either case, it's the only way to free the connection.
     */
    public void releaseConnection() {
        if (this.con == null) {
            // Nothing to release.
            return;
        }
        if (isInLocalTransaction()) {
            // Caller misuse: a local transaction is still open. Roll it back defensively.
            log.error("Release connection: connection is in local transaction, missing 'localCommit' or"
                    + " 'localRollback' call - try to rollback the connection");
            localRollback();
            // NOTE(review): on this path the connection is not handed back to the
            // factory here — presumably localRollback() releases it internally; confirm.
        } else {
            this.connectionFactory.releaseConnection(this.jcd, this.con);
            // Clear both the plain and the batch connection references.
            this.con = null;
            this.batchCon = null;
        }
    }
}
public class ESHttpUtils { /** * Returns a single node from a given document using xpath . * @ param rootNode * Node to search . * @ param xPath * XPath to use . * @ param expression * XPath expression . * @ return Node or < code > null < / code > if no match was found . */ @ Nullable public static Node findNode ( @ NotNull final Node rootNode , @ NotNull final XPath xPath , @ NotNull final String expression ) { } }
Contract . requireArgNotNull ( "rootNode" , rootNode ) ; Contract . requireArgNotNull ( "xPath" , xPath ) ; Contract . requireArgNotNull ( "expression" , expression ) ; try { return ( Node ) xPath . compile ( expression ) . evaluate ( rootNode , XPathConstants . NODE ) ; } catch ( final XPathExpressionException ex ) { throw new RuntimeException ( "Failed to read node: " + expression , ex ) ; }
public class JQMDataTable {
    /**
     * Replaces the column definitions.
     * You have to call refreshColumns() to update head and body (if widget is already loaded).
     *
     * @param cols the new column definitions; null/empty clears the columns
     */
    public void setColumns(List<ColumnDefEx> cols) {
        clearHead();
        datacols.clear();
        if (!Empty.is(cols)) datacols.addAll(cols);
        // head will be created later in onLoad() or by refreshColumns()
    }
}
public class CertFactory {
    /**
     * Factory method for creating a new {@link X509Certificate} from the given certificate
     * type and certificate data as byte array.
     *
     * @param type the certificate type (e.g. "X.509")
     * @param certificateData the certificate data as byte array
     * @return the new {@link X509Certificate}
     * @throws CertificateException if no Provider supports a CertificateFactorySpi
     *         implementation for the given certificate type, or if the data cannot be parsed
     */
    public static X509Certificate newX509Certificate(final String type, final byte[] certificateData)
            throws CertificateException {
        final CertificateFactory factory = CertificateFactory.getInstance(type);
        // Parse directly from an in-memory stream over the raw bytes.
        return (X509Certificate) factory.generateCertificate(new ByteArrayInputStream(certificateData));
    }
}
public class PixelMath {
    /**
     * Bounds image pixels to be between these two values.
     *
     * @param img Image (modified in place)
     * @param min minimum value.
     * @param max maximum value.
     */
    public static void boundImage(GrayS16 img, int min, int max) {
        // Thin public facade over the low-level implementation.
        ImplPixelMath.boundImage(img, min, max);
    }
}
public class PullFileLoader {
    /**
     * Find and load all pull files under a base {@link Path} recursively in an order sorted
     * by last modified date.
     *
     * @param path base {@link Path} where pull files should be found recursively.
     * @param sysProps A {@link Config} used as fallback.
     * @param loadGlobalProperties if true, will also load at most one *.properties file per
     *        directory from the {@link #rootDirectory} to the pull file {@link Path} for each
     *        pull file.
     * @return The loaded {@link Config}s. Note: entries that fail to load appear as
     *         {@code null} in the list (best-effort semantics); callers must tolerate nulls.
     */
    public List<Config> loadPullFilesRecursively(Path path, Config sysProps, boolean loadGlobalProperties) {
        // Lists.transform returns a lazy view: each pull file is loaded on access.
        return Lists.transform(this.fetchJobFilesRecursively(path), new Function<Path, Config>() {
            @Nullable
            @Override
            public Config apply(@Nullable Path jobFile) {
                if (jobFile == null) {
                    return null;
                }
                try {
                    return PullFileLoader.this.loadPullFile(jobFile, sysProps, loadGlobalProperties);
                } catch (IOException e) {
                    // A file that fails to load is logged and yields a null entry
                    // rather than aborting the whole listing.
                    log.error("Cannot load job from {} due to {}", jobFile,
                            ExceptionUtils.getFullStackTrace(e));
                    return null;
                }
            }
        });
    }
}
public class Win32File {
    /**
     * Wraps a {@code File} object pointing to a Windows symbolic link
     * ({@code .lnk} file) in a {@code Win32Lnk}.
     * If the operating system is not Windows, the {@code pPath} parameter is
     * returned unwrapped.
     *
     * @param pPath any path, possibly pointing to a Windows symbolic link file.
     *        May be {@code null}, in which case {@code null} is returned.
     * @return a new {@code Win32Lnk} object if the current os is Windows, and the file is a
     *         Windows symbolic link ({@code .lnk} file), otherwise {@code pPath}
     */
    public static File wrap(final File pPath) {
        if (pPath == null) {
            return null;
        }
        if (IS_WINDOWS) {
            // Don't wrap if already wrapped
            if (pPath instanceof Win32File || pPath instanceof Win32Lnk) {
                return pPath;
            }
            if (pPath.exists() && pPath.getName().endsWith(".lnk")) {
                // If Win32 .lnk, let's wrap
                try {
                    return new Win32Lnk(pPath);
                } catch (IOException e) {
                    // TODO: FixMe! The failure is swallowed and the path falls through
                    // to a plain Win32File wrap below.
                    e.printStackTrace();
                }
            }
            // Wrap even if not a .lnk, as the listFiles() methods etc,
            // could potentially return .lnk's, that we want to wrap later...
            return new Win32File(pPath);
        }
        // Non-Windows: return the original, unwrapped.
        return pPath;
    }
}
public class FileTools { /** * Read a file of properties and notifies all the listeners for each property . * @ param source * the source input stream . * @ param encoding * the encoding of the file . * @ param listeners * a list of any { @ link PropertiesParsingListener } * @ throws IOException * @ throws SyntaxErrorException * @ throws IOException * if there is a problem to deal with . * @ throws SyntaxErrorException * if there is a problem to deal with . * @ see LineByLinePropertyParser * @ since 16.09.00 */ public static void readPropertiesToListeners ( InputStream source , Encoding encoding , PropertiesParsingListener ... listeners ) throws IOException , SyntaxErrorException { } }
BufferedReader _reader = new BufferedReader ( createReaderForInputStream ( source , encoding ) ) ; LineByLinePropertyParser _parser = new LineByLinePropertyParser ( ) ; for ( PropertiesParsingListener _listener : listeners ) { _parser . addListener ( _listener ) ; } for ( String _line = _reader . readLine ( ) ; _line != null ; ) { _parser . parseLine ( _line ) ; _line = _reader . readLine ( ) ; } _reader . close ( ) ;
public class GraphOfTheGodsFactory {
    /**
     * Calls {@link TitanFactory#open(String)}, passing the Titan configuration file path
     * which must be the sole element in the {@code args} array, then calls
     * {@link #load(com.thinkaurelius.titan.core.TitanGraph)} on the opened graph,
     * then calls {@link com.thinkaurelius.titan.core.TitanGraph#close()} and returns.
     *
     * This method may call {@link System#exit(int)} if it encounters an error, such as
     * failure to parse its arguments. Only use this method when executing main from
     * a command line. Use one of the other methods on this class ({@link #create(String)}
     * or {@link #load(com.thinkaurelius.titan.core.TitanGraph)}) when calling from
     * an enclosing application.
     *
     * @param args a singleton array containing a path to a Titan config properties file
     */
    public static void main(String args[]) {
        // Exactly one argument (the config file path) is required.
        if (null == args || 1 != args.length) {
            System.err.println("Usage: GraphOfTheGodsFactory <titan-config-file>");
            System.exit(1); // terminates the JVM; nothing below runs on this path
        }
        TitanGraph g = TitanFactory.open(args[0]);
        load(g);
        g.close();
    }
}
public class SvgGraphicsContext {
    /**
     * Set a specific cursor on an element of this {@code GraphicsContext}.
     *
     * @param object the element on which the controller should be set.
     * @param cursor The string representation of the cursor to use.
     */
    public void setCursor(Object object, String cursor) {
        // Silently ignored while the context is not attached to the DOM.
        if (isAttached()) {
            helper.setCursor(object, cursor);
        }
    }
}
public class DelaunayTriangulation { /** * Calculates a Voronoi cell for a given neighborhood * in this triangulation . A neighborhood is defined by a triangle * and one of its corner points . * By Udi Schneider * @ param triangle a triangle in the neighborhood * @ param p corner point whose surrounding neighbors will be checked * @ return set of Points representing the cell polygon */ public Vector3 [ ] calcVoronoiCell ( Triangle triangle , Vector3 p ) { } }
// handle any full triangle if ( ! triangle . isHalfplane ( ) ) { // get all neighbors of given corner point List < Triangle > neighbors = findTriangleNeighborhood ( triangle , p ) ; Iterator < Triangle > itn = neighbors . iterator ( ) ; Vector3 [ ] vertices = new Vector3 [ neighbors . size ( ) ] ; // for each neighbor , including the given triangle , add // center of circumscribed circle to cell polygon int index = 0 ; while ( itn . hasNext ( ) ) { Triangle tmp = itn . next ( ) ; vertices [ index ++ ] = tmp . circumcircle ( ) . getCenter ( ) ; } return vertices ; } // handle half plane // in this case , the cell is a single line // which is the perpendicular bisector of the half plane line else { // local friendly alias Triangle halfplane = triangle ; // third point of triangle adjacent to this half plane // ( the point not shared with the half plane ) Vector3 third = null ; // triangle adjacent to the half plane Triangle neighbor = null ; // find the neighbor triangle if ( ! halfplane . next_12 ( ) . isHalfplane ( ) ) { neighbor = halfplane . next_12 ( ) ; } else if ( ! halfplane . next_23 ( ) . isHalfplane ( ) ) { neighbor = halfplane . next_23 ( ) ; } else if ( ! halfplane . next_23 ( ) . isHalfplane ( ) ) { neighbor = halfplane . next_31 ( ) ; } // find third point of neighbor triangle // ( the one which is not shared with current half plane ) // this is used in determining half plane orientation if ( ! neighbor . p1 ( ) . equals ( halfplane . p1 ( ) ) && ! neighbor . p1 ( ) . equals ( halfplane . p2 ( ) ) ) third = neighbor . p1 ( ) ; if ( ! neighbor . p2 ( ) . equals ( halfplane . p1 ( ) ) && ! neighbor . p2 ( ) . equals ( halfplane . p2 ( ) ) ) third = neighbor . p2 ( ) ; if ( ! neighbor . p3 ( ) . equals ( halfplane . p1 ( ) ) && ! neighbor . p3 ( ) . equals ( halfplane . p2 ( ) ) ) third = neighbor . p3 ( ) ; // delta ( slope ) of half plane edge float halfplane_delta = ( halfplane . p1 ( ) . y - halfplane . p2 ( ) . y ) / ( halfplane . p1 ( ) . 
x - halfplane . p2 ( ) . x ) ; // delta of line perpendicular to current half plane edge float perp_delta = ( 1.0f / halfplane_delta ) * ( - 1.0f ) ; // determine orientation : find if the third point of the triangle // lies above or below the half plane // works by finding the matching y value on the half plane line equation // for the same x value as the third point float y_orient = halfplane_delta * ( third . x - halfplane . p1 ( ) . x ) + halfplane . p1 ( ) . y ; boolean above = true ; if ( y_orient > third . y ) above = false ; // based on orientation , determine cell line direction // ( towards right or left side of window ) float sign = 1.0f ; if ( ( perp_delta < 0 && ! above ) || ( perp_delta > 0 && above ) ) { sign = - 1.0f ; } // the cell line is a line originating from the circumcircle to infinity // x = 500.0 is used as a large enough value Vector3 circumcircle = neighbor . circumcircle ( ) . getCenter ( ) ; float x_cell_line = ( circumcircle . x + ( 500.0f * sign ) ) ; float y_cell_line = perp_delta * ( x_cell_line - circumcircle . x ) + circumcircle . y ; Vector3 [ ] result = new Vector3 [ 2 ] ; result [ 0 ] = circumcircle ; result [ 1 ] = new Vector3 ( x_cell_line , y_cell_line , 0 ) ; return result ; }
public class CouchbaseSchemaManager { /** * Removes the bucket . * @ param name * the name */ private void removeBucket ( String name ) { } }
try { if ( clusterManager . removeBucket ( name ) ) { LOGGER . info ( "Bucket [" + name + "] is removed!" ) ; } else { LOGGER . error ( "Not able to remove bucket [" + name + "]." ) ; throw new KunderaException ( "Not able to remove bucket [" + name + "]." ) ; } } catch ( CouchbaseException ex ) { LOGGER . error ( "Not able to remove bucket [" + name + "]." , ex ) ; throw new KunderaException ( "Not able to remove bucket [" + name + "]." , ex ) ; }
public class LRImporter { /** * Obtain the path used for a harvest request * @ param requestID the " request _ ID " parameter for the request * @ param byResourceID the " by _ resource _ ID " parameter for the request * @ param byDocID the " by _ doc _ ID " parameter for the request * @ return the string of the path for a harvest request */ private String getHarvestRequestPath ( String requestID , Boolean byResourceID , Boolean byDocID ) { } }
// Harvest requests require a request_ID; without one there is no valid path.
if (requestID == null) {
    // error
    return null;
}
StringBuilder path = new StringBuilder(harvestPath);
path.append('?').append(requestIDParam).append('=').append(requestID);
// BUGFIX/robustness: the Boolean flags arrive as wrapper objects and were
// auto-unboxed directly, throwing NullPointerException when a caller passed
// null. Treat null as false instead.
boolean resourceFlag = Boolean.TRUE.equals(byResourceID);
path.append('&').append(byResourceIDParam).append('=')
    .append(resourceFlag ? booleanTrueString : booleanFalseString);
boolean docFlag = Boolean.TRUE.equals(byDocID);
path.append('&').append(byDocIDParam).append('=')
    .append(docFlag ? booleanTrueString : booleanFalseString);
return path.toString();
public class SynchronizedIO { /** * Write binary file data * @ param aFileName * @ param aData * @ throws Exception */ public void writeFile ( String aFileName , URL aData ) throws Exception { } }
// Serialize writes to the same file: retrieveLock yields one lock object per
// file name, so concurrent writers to the same path cannot interleave.
Object lock = retrieveLock(aFileName);
synchronized (lock) {
    // Perform the actual binary copy from the URL to the file while holding
    // the per-file lock.
    IO.writeFile(aFileName, aData);
}
public class LongStream { /** * Creates a { @ code LongStream } from { @ code PrimitiveIterator . OfLong } . * @ param iterator the iterator with elements to be passed to stream * @ return the new { @ code LongStream } * @ throws NullPointerException if { @ code iterator } is null */ @ NotNull public static LongStream of ( @ NotNull PrimitiveIterator . OfLong iterator ) { } }
Objects . requireNonNull ( iterator ) ; return new LongStream ( iterator ) ;
public class HttpRedirectBindingUtil { /** * Converts an { @ link AggregatedHttpMessage } which is received from the remote entity to * a { @ link SAMLObject } . */ @ SuppressWarnings ( "unchecked" ) static < T extends SAMLObject > MessageContext < T > toSamlObject ( AggregatedHttpMessage msg , String name , Map < String , SamlIdentityProviderConfig > idpConfigs , @ Nullable SamlIdentityProviderConfig defaultIdpConfig ) { } }
// All of these are required; only defaultIdpConfig may be null.
requireNonNull(msg, "msg");
requireNonNull(name, "name");
requireNonNull(idpConfigs, "idpConfigs");
// Extract the SAML message carried in the query parameter 'name'
// (HTTP-redirect binding transports it deflated and base64-encoded).
final SamlParameters parameters = new SamlParameters(msg);
final T message = (T) fromDeflatedBase64(parameters.getFirstValue(name));
final MessageContext<T> messageContext = new MessageContext<>();
messageContext.setMessage(message);
// Only requests and status responses are expected; anything else is a
// protocol error.
final Issuer issuer;
if (message instanceof RequestAbstractType) {
    issuer = ((RequestAbstractType) message).getIssuer();
} else if (message instanceof StatusResponseType) {
    issuer = ((StatusResponseType) message).getIssuer();
} else {
    throw new SamlException("invalid message type: " + message.getClass().getSimpleName());
}
// Use the default identity provider config if there's no issuer.
final SamlIdentityProviderConfig config;
if (issuer != null) {
    final String idpEntityId = issuer.getValue();
    config = idpConfigs.get(idpEntityId);
    if (config == null) {
        throw new SamlException("a message from unknown identity provider: " + idpEntityId);
    }
} else {
    if (defaultIdpConfig == null) {
        throw new SamlException("failed to get an Issuer element");
    }
    config = defaultIdpConfig;
}
// If this message is sent via HTTP-redirect binding protocol, its signature
// parameter should be validated.
validateSignature(config.signingCredential(), parameters, name);
// Propagate RelayState (if any) on the binding context so it can be echoed
// back to the requester.
final String relayState = parameters.getFirstValueOrNull(RELAY_STATE);
if (relayState != null) {
    final SAMLBindingContext context = messageContext.getSubcontext(SAMLBindingContext.class, true);
    assert context != null;
    context.setRelayState(relayState);
}
return messageContext;
public class GSAPImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setQ ( Integer newQ ) { } }
// Standard EMF-generated setter: remember the old value, store the new one,
// then emit a SET notification so registered adapters can react.
Integer oldQ = q;
q = newQ;
if (eNotificationRequired())
    eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.GSAP__Q, oldQ, q));
public class CaseSyntax { /** * Converts to this { @ link CaseSyntax } . The first character will be converted using { @ link # getFirstCase ( ) } . * Characters other than { @ link Character # isLetterOrDigit ( char ) letters or digits } are considered as word separator . * In the most cases , they will be * @ param string the { @ link String } to convert . * @ param locale the explicit { @ link Locale } to use . In most cases you want to use { @ link # convert ( String ) } instead . * @ return the String converted to this { @ link CaseSyntax } . */ public String convert ( String string , Locale locale ) { } }
// Trivial inputs convert to themselves.
if ((string == null) || (string.isEmpty())) {
    return string;
}
// fast and simple cases first...
// If there is no word separator and word-start characters use the same case
// as the rest, the conversion degenerates to plain case mapping.
if ((!hasWordSeparator()) && (this.wordStartCase == this.otherCase)) {
    String s = string;
    if (this.wordSeparator == null) {
        s = removeSpecialCharacters(s);
    }
    if (this.firstCase == this.wordStartCase) {
        // Uniform case: one conversion over the whole string.
        s = this.firstCase.convert(s, locale);
    } else {
        // Only the first character differs: convert it separately.
        String first = this.firstCase.convert(s.substring(0, 1), locale);
        String rest = this.otherCase.convert(s.substring(1), locale);
        s = first + rest;
    }
    return s;
}
// General case: walk the characters, detecting word boundaries from
// separator/dollar characters and lower->UPPER camel-case transitions.
CharIterator charIterator = new SequenceCharIterator(string);
StringBuilder buffer = new StringBuilder(string.length() + 4);
char c = charIterator.next();
CharClass previousClass = CharClass.of(c);
// Consume any leading separator run (emitting at most one configured
// separator at the very start).
while (previousClass.isSeparatorOrDollar()) {
    if ((buffer.length() == 0) && hasWordSeparator()) {
        buffer.append(this.wordSeparator);
    }
    if (!charIterator.hasNext()) {
        return buffer.toString();
    }
    c = charIterator.next();
    previousClass = CharClass.of(c);
}
// The first real character uses firstCase.
appendCasedChar(buffer, c, this.firstCase);
CaseConversion previousCase = CaseConversion.ofExample(c, false);
// start/end delimit the span of characters not yet flushed to the buffer.
// NOTE(review): appendOthers is assumed to return the new start index for
// that span — confirm against its implementation.
int start = 1;
int end = start;
while (charIterator.hasNext()) {
    c = charIterator.next();
    CharClass currentClass = CharClass.of(c);
    CaseConversion currentCase = CaseConversion.ofExample(c, false);
    switch (currentClass) {
        case LETTER:
            if (previousClass.isSeparatorOrDollar()) {
                // Letter right after a separator: a new word starts here.
                appendCasedChar(buffer, c, this.wordStartCase);
                start++;
            } else if (currentCase != previousCase) {
                // camelCase hump: lower -> UPPER transition starts a new word.
                if ((currentCase == CaseConversion.UPPER_CASE) && (end > 1)) {
                    assert (previousCase == CaseConversion.LOWER_CASE);
                    start = appendOthers(string, buffer, start, end);
                    if (hasWordSeparator()) {
                        buffer.append(this.wordSeparator);
                    }
                    appendCasedChar(buffer, c, this.wordStartCase);
                }
            }
            break;
        case SEPARATOR:
        case DOLLAR:
            if (!previousClass.isSeparatorOrDollar()) {
                // End of a word: flush pending characters, then emit the
                // configured separator (or keep the original special char).
                start = appendOthers(string, buffer, start, end);
                if (KEEP_SPECIAL_CHARS.equals(this.wordSeparator)) {
                    buffer.append(c);
                } else if (this.wordSeparator != null) {
                    buffer.append(this.wordSeparator);
                }
            } else {
                // Consecutive separators collapse; just advance the span start.
                start++;
            }
            break;
        default:
            break;
    }
    // Digits intentionally do not update the previous class/case, so a digit
    // inside a word does not break camel-case boundary detection.
    if (currentClass != CharClass.DIGIT) {
        previousClass = currentClass;
        previousCase = currentCase;
    }
    end++;
}
// Flush whatever remains after the last boundary.
if (start < end) {
    appendOthers(string, buffer, start, end);
}
return buffer.toString();
public class CudaZeroHandler { /** * This method returns set of allocation tracking IDs for specific device * @ param deviceId * @ return */ @ Override public Set < Long > getDeviceTrackingPoints ( Integer deviceId ) { } }
// Robustness: previously this threw a NullPointerException when no allocation
// map had been registered for the given device id yet. Return an empty set
// instead. (If deviceAllocations is concurrent and entries can be removed,
// there is a benign check-then-get window — TODO confirm entries are never
// removed.)
if (!deviceAllocations.containsKey(deviceId)) {
    return java.util.Collections.emptySet();
}
return deviceAllocations.get(deviceId).keySet();
public class AbstractBioConnector { /** * THIS IS A BUG IN JAVA , should not need two methods just to capture the type */ protected < F extends ConnectFuture > ConnectFuture connectInternal ( final ResourceAddress remoteAddress , final IoHandler handler , final IoSessionInitializer < F > initializer ) { } }
ConnectFuture future;
// Capture the next-protocol option and the lower transport layer up front;
// both are needed inside the session initializers below.
final String nextProtocol = remoteAddress.getOption(ResourceAddress.NEXT_PROTOCOL);
ResourceAddress transport = remoteAddress.getTransport();
if (transport != null) {
    // A lower transport exists: delegate the connect to the bridge connector
    // for that transport, decorating each new session with the remote address
    // and derived local address before the caller's initializer runs.
    BridgeConnector connector = bridgeServiceFactory.newBridgeConnector(transport);
    future = connector.connect(transport, handler, new IoSessionInitializer<F>() {
        @Override
        public void initializeSession(IoSession session, F future) {
            REMOTE_ADDRESS.set(session, remoteAddress);
            setLocalAddressFromSocketAddress(session, getTransportName(), nextProtocol);
            if (initializer != null) {
                initializer.initializeSession(session, future);
            }
        }
    });
} else {
    // Bottom of the transport stack: convert the resource address into a
    // concrete socket address and connect directly.
    T socketAddress = socketAddressFactory.createSocketAddress(remoteAddress);
    future = connector.connect(socketAddress, new IoSessionInitializer<F>() {
        @Override
        public void initializeSession(IoSession session, F future) {
            // connectors don't need lookup so set this directly on the session
            session.setAttribute(BridgeConnectHandler.DELEGATE_KEY, handler);
            REMOTE_ADDRESS.set(session, remoteAddress);
            setLocalAddressFromSocketAddress(session, getTransportName(), nextProtocol);
            if (initializer != null) {
                initializer.initializeSession(session, future);
            }
        }
    });
}
return future;
public class Table { /** * Starts a BigQuery Job to load data into the current table from the provided source URIs . * Returns the started { @ link Job } object . * < p > Example loading data from a list of Google Cloud Storage files . * < pre > { @ code * String gcsUrl1 = " gs : / / my _ bucket / filename1 . csv " ; * String gcsUrl2 = " gs : / / my _ bucket / filename2 . csv " ; * List < String > sourceUris = new ArrayList < > ( ) ; * sourceUris . add ( gcsUrl1 ) ; * sourceUris . add ( gcsUrl2 ) ; * Job job = table . load ( FormatOptions . csv ( ) , sourceUris ) ; * / / Wait for the job to complete * try { * Job completedJob = job . waitFor ( RetryOption . initialRetryDelay ( Duration . ofSeconds ( 1 ) ) , * RetryOption . totalTimeout ( Duration . ofMinutes ( 3 ) ) ) ; * if ( completedJob ! = null & & completedJob . getStatus ( ) . getError ( ) = = null ) { * / / Job completed successfully * } else { * / / Handle error case * } catch ( InterruptedException e ) { * / / Handle interrupted wait * } < / pre > * @ param format the format of the exported data * @ param sourceUris the fully - qualified Google Cloud Storage URIs ( e . g . gs : / / bucket / path ) from * which to load the data * @ param options job options * @ throws BigQueryException upon failure */ public Job load ( FormatOptions format , List < String > sourceUris , JobOption ... options ) throws BigQueryException { } }
LoadJobConfiguration loadConfig = LoadJobConfiguration . of ( getTableId ( ) , sourceUris , format ) ; return bigquery . create ( JobInfo . of ( loadConfig ) , options ) ;
public class MiniSatStyleSolver { /** * Returns the assigned value of a given literal . * @ param lit the literal * @ return the assigned value of the literal */ protected Tristate value ( int lit ) { } }
return sign ( lit ) ? Tristate . negate ( this . v ( lit ) . assignment ( ) ) : this . v ( lit ) . assignment ( ) ;
public class NBTIO { /** * Reads the root CompoundTag from the given file . * @ param file File to read from . * @ param compressed Whether the NBT file is compressed . * @ param littleEndian Whether the NBT file is little endian . * @ return The read compound tag . * @ throws java . io . IOException If an I / O error occurs . */ public static CompoundTag readFile ( File file , boolean compressed , boolean littleEndian ) throws IOException { } }
InputStream in = new FileInputStream ( file ) ; if ( compressed ) { in = new GZIPInputStream ( in ) ; } Tag tag = readTag ( in , littleEndian ) ; if ( ! ( tag instanceof CompoundTag ) ) { throw new IOException ( "Root tag is not a CompoundTag!" ) ; } return ( CompoundTag ) tag ;
public class SimonConnectionConfiguration { /** * Tests whether URL is a Simon JDBC connection URL . */ public static boolean isSimonUrl ( String url ) { } }
// BUGFIX: toLowerCase() without a Locale is locale-sensitive — under e.g. the
// Turkish default locale 'I' lowercases to dotless 'ı', so valid URLs could
// fail to match. regionMatches(ignoreCase=true, ...) performs a
// locale-independent case-insensitive prefix comparison instead.
// (Assumes URL_PREFIX is plain ASCII, as JDBC prefixes are.)
return url != null
        && url.regionMatches(true, 0, SimonConnectionConfiguration.URL_PREFIX, 0,
                SimonConnectionConfiguration.URL_PREFIX.length());
public class StyleSheetUpdater { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
// Only serialize the style sheet when one was provided; it is sent as a
// JSON-encoded string under the "StyleSheet" key.
if (styleSheet != null) {
    request.addPostParam("StyleSheet", Converter.mapToJson(styleSheet));
}
public class LatchedObserver { /** * Create a LatchedObserver with the given indexed callback function ( s ) and a shared latch . */ public static < T > LatchedObserver < T > createIndexed ( Action2 < ? super T , ? super Integer > onNext , CountDownLatch latch ) { } }
// Only the indexed onNext callback is customized; error and completion use
// the no-op defaults. The shared latch is passed through to the observer.
return new LatchedObserverIndexedImpl<T>(onNext, Functionals.emptyThrowable(), Functionals.empty(), latch);
public class AbstractRoller { /** * Invoked by subclasses ; performs actual file roll . Tests to see whether roll * is necessary have already been performed , so just do it . */ final void roll ( final long timeForSuffix ) { } }
// Compute the backup file name (filename + suffix, possibly numbered) first,
// while the current file is still open.
final File backupFile = this.prepareBackupFile(timeForSuffix);
// close filename
this.getAppender().closeFile();
// rename filename on disk to filename + suffix (+ number)
this.doFileRoll(this.getAppender().getIoFile(), backupFile);
// setup new file 'filename'
this.getAppender().openFile();
// Notify listeners only after the roll completed, so they observe the
// finished backup file.
this.fireFileRollEvent(new FileRollEvent(this, backupFile));
public class GitlabAPI { /** * Get a list of projects of perPage elements accessible by the authenticated user given page offset * @ param user Gitlab User to invoke sudo with * @ param page Page offset * @ param perPage Number of elements to get after page offset * @ return A list of gitlab projects * @ throws IOException Gitlab API call error */ public List < GitlabProject > getProjectsViaSudoWithPagination ( GitlabUser user , int page , int perPage ) throws IOException { } }
Pagination pagination = new Pagination ( ) . withPage ( page ) . withPerPage ( perPage ) ; return getProjectsViaSudoWithPagination ( user , pagination ) ;
public class Positions { /** * Get a writable position of type { @ code type # value } . * @ param typed not { @ code null } * @ return Position . Writable */ public static < T > Position . Writable < T > writable ( final Typed < T > typed ) { } }
// Delegate to the two-argument overload with a no-op callback.
return writable(typed, noop());
public class GVRTextureCapturer { /** * Updates the backing render texture . This method should not * be called when capturing is in progress . * @ param width The width of the backing texture in pixels . * @ param height The height of the backing texture in pixels . * @ param sampleCount The MSAA sample count . */ public void update ( int width , int height , int sampleCount ) { } }
// Re-allocating the render target while a capture is in flight would pull the
// texture out from under it, so fail fast.
if (capturing) {
    throw new IllegalStateException("Cannot update backing texture while capturing");
}
this.width = width;
this.height = height;
// sampleCount == 0 selects the non-MSAA constructor; any other value is
// passed through as the MSAA sample count (no validation is done here).
if (sampleCount == 0)
    captureTexture = new GVRRenderTexture(getGVRContext(), width, height);
else
    captureTexture = new GVRRenderTexture(getGVRContext(), width, height, sampleCount);
setRenderTexture(captureTexture);
// One int per pixel for reading the rendered frame back to the CPU.
readBackBuffer = new int[width * height];
public class Blob { /** * { @ inheritDoc } * @ throws SQLFeatureNotSupportedException if this BLOB is empty */ public OutputStream setBinaryStream ( final long pos ) throws SQLException { } }
// All state transitions of this Blob are guarded by its own monitor.
synchronized (this) {
    if (this.underlying == null) {
        // Empty BLOB: a position beyond the first byte is a plain argument
        // error...
        if (pos > 1) {
            throw new SQLException("Invalid position: " + pos);
        } // end of if
        // ...while writing at position 1 is unsupported (see @throws).
        throw new SQLFeatureNotSupportedException("Cannot write to empty BLOB");
    } // end of if
    // Non-empty BLOB: delegate to the wrapped driver-level Blob.
    return this.underlying.setBinaryStream(pos);
} // end of sync
public class TagValue { /** * Gets the bytes . * @ param i the i * @ param j the j * @ return the bytes */ public int getBytesBigEndian ( int i , int j ) { } }
// Accumulate j bytes starting at index i, most significant byte first:
// shift the running value left by one byte, then add the next byte.
int accumulated = 0;
for (int pos = i; pos < i + j; pos++) {
    accumulated = (accumulated << 8) + value.get(pos).toUint();
}
return accumulated;
public class QueryCreator { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
// Append only the parameters that were actually set on this creator.
if (language != null) {
    request.addPostParam("Language", language);
}
if (query != null) {
    request.addPostParam("Query", query);
}
if (tasks != null) {
    request.addPostParam("Tasks", tasks);
}
if (modelBuild != null) {
    // Serialized via toString().
    request.addPostParam("ModelBuild", modelBuild.toString());
}
if (field != null) {
    request.addPostParam("Field", field);
}
public class Saml2ClientMetadataController { /** * Gets idp metadata by name . * @ param client the client * @ return the service provider metadata by name */ @ GetMapping ( "/sp/{client}/idp/metadata" ) public ResponseEntity < String > getIdentityProviderMetadataByName ( @ PathVariable ( "client" ) final String client ) { } }
// Resolve the SAML2 client by name; unknown clients yield a 406 response.
val saml2Client = (SAML2Client) builtClients.findClient(client);
if (saml2Client == null) {
    return getNotAcceptableResponseEntity();
}
return getSaml2ClientIdentityProviderMetadataResponseEntity(saml2Client);
public class AMRDefaultRuleProcessor { /** * Clone processor */ @ Override public Processor newProcessor ( Processor p ) { } }
// Clone via the builder, then copy the two stream references explicitly.
AMRDefaultRuleProcessor oldProcessor = (AMRDefaultRuleProcessor) p;
Builder builder = new Builder(oldProcessor);
AMRDefaultRuleProcessor newProcessor = builder.build();
newProcessor.resultStream = oldProcessor.resultStream;
newProcessor.ruleStream = oldProcessor.ruleStream;
return newProcessor;
public class RePairRule { /** * Gets occurrences . * @ return all rule ' s occurrences . */ public int [ ] getOccurrences ( ) { } }
// Snapshot the occurrence list into a primitive array.
final int count = this.occurrences.size();
final int[] positions = new int[count];
for (int idx = 0; idx < count; idx++) {
    positions[idx] = this.occurrences.get(idx);
}
return positions;
public class BracketStage { /** * Parses the threshold to remove the matched braket . * No checks are performed against the passed in string : the object assumes * that the string is correct since the { @ link # canParse ( String ) } method * < b > must < / b > be called < b > before < / b > this method . * @ param threshold * The threshold to parse * @ param tc * The threshold config object . This object will be populated * according to the passed in threshold . * @ return the remaining part of the threshold */ @ Override public String parse ( final String threshold , final RangeConfig tc ) { } }
// Per the contract, canParse() has already verified the leading bracket, so
// it is safe to configure the range and strip the first character blindly.
configure(tc);
return threshold.substring(1);
public class FSNamesystem { /** * dumps the contents of recentInvalidateSets */ void dumpExcessReplicasSets ( PrintWriter out ) { } }
// Number of datanodes that currently have excess replicas queued for deletion.
int size = excessReplicateMap.values().size();
out.println("Metasave: Excess blocks " + excessBlocksCount + " waiting deletion from " + size + " datanodes.");
if (size == 0) {
    return;
}
// Per datanode, print its name followed by the detailed excess-block list.
for (Map.Entry<String, LightWeightHashSet<Block>> entry : excessReplicateMap.entrySet()) {
    LightWeightHashSet<Block> blocks = entry.getValue();
    if (blocks.size() > 0) {
        out.println(datanodeMap.get(entry.getKey()).getName());
        blocks.printDetails(out);
    }
}
public class InjectionTarget { /** * Set the parent InjectionBinding containing the information for this injection target * @ param injectionBinding */ public void setInjectionBinding ( InjectionBinding < ? > injectionBinding ) { } }
// Trace the association for serviceability, then remember the owning binding.
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
    Tr.debug(tc, "setInjectionBinding : " + injectionBinding);
ivInjectionBinding = injectionBinding;
public class DefaultComposer { /** * ( non - Javadoc ) * @ see org . jsmpp . util . PDUComposer # dataSmResp ( int , java . lang . String , * org . jsmpp . bean . OptionalParameter [ ] ) */ public byte [ ] dataSmResp ( int sequenceNumber , String messageId , OptionalParameter ... optionalParameters ) throws PDUStringException { } }
// Reject message ids that violate the SMPP string constraints before
// composing the PDU (throws PDUStringException).
StringValidator.validateString(messageId, StringParameter.MESSAGE_ID);
// data_sm_resp: command id, status 0 (OK), sequence number, then message id.
PDUByteBuffer buf = new PDUByteBuffer(SMPPConstant.CID_DATA_SM_RESP, 0, sequenceNumber);
buf.append(messageId);
return buf.toBytes();
public class AuditFieldAnnotationAttribute { /** * Gets the all params . * @ param method * the method * @ param arg1 * the arg1 * @ return the all params */ public List < Field > getAllFields ( final Method method , final Object [ ] arg1 ) { } }
final Annotation [ ] [ ] parameterAnnotations = method . getParameterAnnotations ( ) ; List < Field > actionItems = new ArrayList < Field > ( ) ; int i = 0 ; String paramName = null ; String paramValue = null ; Class < ? > paramType ; for ( final Annotation [ ] annotations : parameterAnnotations ) { final Object object = arg1 [ i ++ ] ; // paramValue = ObjectToStringSerializer . ( object ) ; paramType = object . getClass ( ) ; for ( final Annotation annotation : annotations ) { if ( annotation instanceof AuditField ) { final AuditField field = ( AuditField ) annotation ; paramName = field . field ( ) ; } else if ( annotation instanceof DeIdentify ) { final DeIdentify deidentify = ( DeIdentify ) annotation ; paramValue = DeIdentifyUtil . deidentify ( paramValue , deidentify . left ( ) , deidentify . right ( ) , deidentify . fromLeft ( ) , deidentify . fromRight ( ) ) ; } } if ( null == paramName ) { paramName = "arg" + ( i - 1 ) ; } Field field = new Field ( ) ; field . setName ( paramName ) ; field . setValue ( paramValue ) ; field . setType ( paramType . getName ( ) ) ; actionItems . add ( field ) ; paramName = null ; paramValue = null ; } return actionItems ;
public class JcrServiceImpl { /** * Reads properties of the given node . * @ param repository the repository name * @ param workspace the workspace name * @ param path the path to the node * @ param node the node instance * @ return list of node ' s properties . * @ throws RepositoryException */ private Collection < JcrProperty > getProperties ( String repository , String workspace , String path , Node node ) throws RepositoryException { } }
// Collect the property definitions of the primary node type plus all mixins.
ArrayList<PropertyDefinition> names = new ArrayList<>();
NodeType primaryType = node.getPrimaryNodeType();
PropertyDefinition[] defs = primaryType.getPropertyDefinitions();
names.addAll(Arrays.asList(defs));
NodeType[] mixinType = node.getMixinNodeTypes();
for (NodeType type : mixinType) {
    defs = type.getPropertyDefinitions();
    names.addAll(Arrays.asList(defs));
}
// Turn each definition into a JcrProperty view, resolving the actual value
// when the node has one set.
ArrayList<JcrProperty> list = new ArrayList<>();
for (PropertyDefinition def : names) {
    String name = def.getName();
    String type = PropertyType.nameFromValue(def.getRequiredType());
    Property p = null;
    try {
        p = node.getProperty(def.getName());
    } catch (PathNotFoundException e) {
        // Deliberately ignored: the property is defined by the type but not
        // set on this node; p stays null and is handled by the helpers below.
    }
    String display = values(def, p);
    String value = def.isMultiple() ? multiValue(p) : singleValue(p, def, repository, workspace, path);
    list.add(new JcrProperty(name, type, value, display));
}
return list;
public class DatastoreImpl { /** * Find all instances by type in a different collection than what is mapped on the class given skipping some documents and returning a * fixed number of the remaining . * @ param collection The collection use when querying * @ param clazz the class to use for mapping the results * @ param property the document property to query against * @ param value the value to check for * @ param offset the number of results to skip * @ param size the maximum number of results to return * @ param validate if true , validate the query * @ param < T > the type to query * @ param < V > the type to filter value * @ return the query */ public < T , V > Query < T > find ( final String collection , final Class < T > clazz , final String property , final V value , final int offset , final int size , final boolean validate ) { } }
final Query<T> query = find(collection, clazz);
if (!validate) {
    // Validation is switched off only while the filter below is applied...
    query.disableValidation();
}
query.offset(offset);
query.limit(size);
// ...and unconditionally re-enabled afterwards, so later additions to the
// returned query are validated regardless of the 'validate' flag.
return query.filter(property, value).enableValidation();
public class MPXWriter { /** * This method is called to format a units value . * @ param value numeric value * @ return currency value */ private String formatUnits ( Number value ) { } }
// A null units value formats to null.
if (value == null) {
    return null;
}
// Scale the stored value down by 100 before formatting.
return m_formats.getUnitsDecimalFormat().format(value.doubleValue() / 100);
public class RpcWrapper { /** * The base functionality used by all NFS calls , which does basic return * code checking and throws an exception if this does not pass . Verbose * logging is also handled here . This method is not used by Portmap , Mount , * and Unmount calls . * @ param request * The request to send . * @ param responseHandler * A response handler . * @ param ipAddress * The IP address to use for communication . * @ throws IOException */ private void callRpcChecked ( S request , RpcResponseHandler < ? extends T > responseHandler , String ipAddress ) throws IOException { } }
// Parameterized debug logging of the outgoing request (no guard needed —
// SLF4J defers formatting).
LOG.debug("server {}, port {}, request {}", _server, _port, request);
callRpcNaked(request, responseHandler.getNewResponse(), ipAddress);
// The response line is guarded so getResponse() is only invoked when debug
// logging is actually enabled.
if (LOG.isDebugEnabled()) {
    LOG.debug("server {}, port {}, response {}", _server, _port, responseHandler.getResponse());
}
// Throws if the response indicates a failure for this request.
responseHandler.checkResponse(request);
public class EQL { /** * Prints the . * @ param _ queryBuilder the query builder * @ return the prints the */ public static Print print ( final Query _queryBuilder ) { } }
// Delegate to the EQL2 implementation; the cast narrows its print result to
// this API's Print type.
return (Print) org.efaps.eql2.EQL2.print(_queryBuilder);
public class ScreenAwtAbstract { /** * Link mouse to the screen ( listening to ) . * @ param mouse The mouse reference . */ private void addMouseListener ( MouseAwt mouse ) { } }
// Register both click and motion listeners for the mouse, then request focus
// so input events are delivered to this component.
componentForMouse.addMouseListener(mouse.getClicker());
componentForMouse.addMouseMotionListener(mouse.getMover());
componentForMouse.requestFocus();
public class MessagePacker { /** * Writes header of an Array value . * You will call other packer methods for each element after this method call . * You don ' t have to call anything at the end of iteration . * @ param arraySize number of elements to be written * @ return this * @ throws IOException when underlying output throws IOException */ public MessagePacker packArrayHeader ( int arraySize ) throws IOException { } }
// Negative sizes are a caller error.
if (arraySize < 0) {
    throw new IllegalArgumentException("array size must be >= 0");
}
// Pick the smallest MessagePack array header that can hold the size:
// fixarray (< 16), array16 (< 65536), or array32 otherwise.
if (arraySize < 16) {
    writeByte((byte) (FIXARRAY_PREFIX | arraySize));
} else if (arraySize < 65536) {
    writeByteAndShort(ARRAY16, (short) arraySize);
} else {
    writeByteAndInt(ARRAY32, arraySize);
}
return this;
public class UidManager { /** * Runs through the tsdb - uid table and removes TSMeta , UIDMeta and TSUID * counter entries from the table * The process is as follows : * < ul > < li > Fetch the max number of Metric UIDs < / li > * < li > Split the # of UIDs amongst worker threads < / li > * < li > Create a delete request with the qualifiers of any matching meta data * columns < / li > < / ul > * < li > Continue on to the next unprocessed timeseries data row < / li > < / ul > * @ param tsdb The tsdb to use for processing , including a search plugin * @ return 0 if completed successfully , something else if it dies */ private static int metaPurge ( final TSDB tsdb ) throws Exception { } }
final long start_time = System.currentTimeMillis() / 1000;
final long max_id = CliUtils.getMaxMetricID(tsdb);
// now figure out how many IDs to divy up between the workers
final int workers = Runtime.getRuntime().availableProcessors() * 2;
final double quotient = (double) max_id / (double) workers;
long index = 1;
LOG.info("Max metric ID is [" + max_id + "]");
LOG.info("Spooling up [" + workers + "] worker threads");
// Each worker purges meta entries for its contiguous slice of UIDs.
final Thread[] threads = new Thread[workers];
for (int i = 0; i < workers; i++) {
    threads[i] = new MetaPurge(tsdb, index, quotient, i);
    threads[i].setName("MetaSync # " + i);
    threads[i].start();
    // Advance to the next slice. The extra increment presumably avoids
    // overlapping slice boundaries — TODO confirm against MetaPurge.
    index += quotient;
    if (index < max_id) {
        index++;
    }
}
// wait till we're all done
for (int i = 0; i < workers; i++) {
    threads[i].join();
    LOG.info("[" + i + "] Finished");
}
// make sure buffered data is flushed to storage before exiting
tsdb.flush().joinUninterruptibly();
final long duration = (System.currentTimeMillis() / 1000) - start_time;
LOG.info("Completed meta data synchronization in [" + duration + "] seconds");
return 0;
public class DefaultTableMetaTSDBFactory { /** * Delegates retrieval of the TableMetaTSDB , using an isolated Spring definition . */ public TableMetaTSDB build ( String destination , String springXml ) { } }
// Thin delegation: the builder creates the TableMetaTSDB from an isolated
// Spring context defined by the given XML, keyed by destination.
return TableMetaTSDBBuilder.build(destination, springXml);
public class SARLOperationHelper { /** * Evalute the Pure annotatino adapters . * @ param operation the operation to adapt . * @ param context the context . * @ return { @ code true } if the pure annotation could be associated to the given operation . */ boolean evaluatePureAnnotationAdapters ( org . eclipse . xtext . common . types . JvmOperation operation , ISideEffectContext context ) { } }
// Scan the operation's adapters for the annotation-generation adapter and
// apply it directly on first match; no index bookkeeping needed.
for (final Adapter candidate : operation.eAdapters()) {
    if (candidate.isAdapterForType(AnnotationJavaGenerationAdapter.class)) {
        final AnnotationJavaGenerationAdapter annotationAdapter = (AnnotationJavaGenerationAdapter) candidate;
        return annotationAdapter.applyAdaptations(this, operation, context);
    }
}
// No adapter registered: the operation cannot be marked pure here.
return false;
public class IsaPog { /** * Write Isabelle theory files to disk for the model and proof obligations * @ param path * Path to the directory to write the files to . Must end with the { @ link File # separatorChar } * @ return true if write is successful * @ throws IOException */ public Boolean writeThyFiles ( String path ) throws IOException { } }
// Write the model theory under its own name...
File modelThyFile = new File(path + modelThyName);
// NOTE(review): writeStringToFile without an explicit charset uses the
// platform default encoding (deprecated in commons-io) — confirm before
// switching to UTF-8, as that would change the bytes written.
FileUtils.writeStringToFile(modelThyFile, modelThy.getContent());
// ...and the proof-obligation theory alongside it.
File posThyFile = new File(path + posThyName);
FileUtils.writeStringToFile(posThyFile, posThy);
return true;
public class SimplePipelineRev803 { /** * Run a sequence of { @ link AnalysisEngine analysis engines } over a { @ link CAS } . This method * does not { @ link AnalysisEngine # destroy ( ) destroy } the engines or send them other events like * { @ link AnalysisEngine # collectionProcessComplete ( ) } . This is left to the caller . * @ param cas * the CAS to process * @ param engines * a sequence of analysis engines to run on the jCas * @ throws UIMAException * @ throws IOException */ public static void runPipeline ( final CAS cas , final AnalysisEngine ... engines ) throws UIMAException , IOException { } }
// Base case of the recursion: no engines left to run on this CAS.
if (engines.length == 0) {
    return;
}
// Run the first engine; it may emit zero or more new CASes.
CasIterator casIter = engines[0].processAndOutputNewCASes(cas);
AnalysisEngine[] enginesRemains = Arrays.copyOfRange(engines, 1, engines.length);
// Each produced CAS flows recursively through the remaining engines and is
// released once its sub-pipeline has finished with it.
while (casIter.hasNext()) {
    CAS nextCas = casIter.next();
    runPipeline(nextCas, enginesRemains);
    nextCas.release();
}
public class DeviceAttribute_3DAODefaultImpl { private void buildAttributeValueObject ( final String name ) { } }
attrval . name = name ; attrval . quality = AttrQuality . ATTR_VALID ; attrval . time = new TimeVal ( ) ; attrval . r_dim = new AttributeDim ( ) ; attrval . w_dim = new AttributeDim ( ) ; attrval . r_dim . dim_x = 1 ; attrval . r_dim . dim_y = 0 ; attrval . w_dim . dim_x = 0 ; attrval . w_dim . dim_y = 0 ; try { attrval . value = ApiUtil . get_orb ( ) . create_any ( ) ; } catch ( final DevFailed e ) { } final long now = System . currentTimeMillis ( ) ; attrval . time . tv_sec = ( int ) ( now / 1000 ) ; attrval . time . tv_usec = ( int ) ( now - attrval . time . tv_sec * 1000 ) * 1000 ; attrval . time . tv_nsec = 0 ; attrval . err_list = null ;
public class MetricsTags {
    /**
     * Generate segment tags (string array) on the input fully qualified segment name to be
     * associated with a metric.
     *
     * The result is a flat key/value array:
     * { TAG_SCOPE, scope, TAG_STREAM, stream, TAG_SEGMENT, segmentId, TAG_EPOCH, epoch }.
     *
     * @param qualifiedSegmentName fully qualified segment name.
     * @return string array as segment tag of metric.
     */
    public static String[] segmentTags(String qualifiedSegmentName) {
        Preconditions.checkNotNull(qualifiedSegmentName);
        // Even slots hold the tag keys, odd slots the values filled in below.
        String[] tags = {TAG_SCOPE, null, TAG_STREAM, null, TAG_SEGMENT, null, TAG_EPOCH, null};
        // Table segments: name is "<scope><delimiter><tableSegmentId>"; the stream slot is
        // set to the TABLES marker and the epoch is fixed to 0.
        // NOTE(review): assumes TABLE_SEGMENT_DELIMITER occurs exactly once — TODO confirm.
        if (qualifiedSegmentName.contains(TABLE_SEGMENT_DELIMITER)) {
            String[] tokens = qualifiedSegmentName.split(TABLE_SEGMENT_DELIMITER);
            tags[1] = tokens[0];
            tags[3] = TABLES;
            tags[5] = tokens[1];
            tags[7] = "0";
            return tags;
        }
        // Stream segments: strip any qualifier via getSegmentBaseName (presumably removes a
        // transaction/suffix part — confirm against its implementation), then split on '/'.
        String segmentBaseName = getSegmentBaseName(qualifiedSegmentName);
        String[] tokens = segmentBaseName.split("[/]");
        // Two tokens -> "stream/segment" (no scope); three -> "scope/stream/segment".
        int segmentIdIndex = tokens.length == 2 ? 1 : 2;
        // The segment id may carry an epoch suffix separated by EPOCH_DELIMITER.
        if (tokens[segmentIdIndex].contains(EPOCH_DELIMITER)) {
            String[] segmentIdTokens = tokens[segmentIdIndex].split(EPOCH_DELIMITER);
            tags[5] = segmentIdTokens[0];
            tags[7] = segmentIdTokens[1];
        } else {
            // No epoch encoded in the name: report epoch 0.
            tags[5] = tokens[segmentIdIndex];
            tags[7] = "0";
        }
        if (tokens.length == 3) {
            tags[1] = tokens[0];
            tags[3] = tokens[1];
        } else {
            // Scope missing from the name: fall back to the "default" scope.
            tags[1] = "default";
            tags[3] = tokens[0];
        }
        return tags;
    }
}
public class Config { /** * Sets the map of job tracker configurations , mapped by config name . * The config name may be a pattern with which the configuration will be * obtained in the future . * @ param jobTrackerConfigs the job tracker configuration map to set * @ return this config instance */ public Config setJobTrackerConfigs ( Map < String , JobTrackerConfig > jobTrackerConfigs ) { } }
this . jobTrackerConfigs . clear ( ) ; this . jobTrackerConfigs . putAll ( jobTrackerConfigs ) ; for ( final Entry < String , JobTrackerConfig > entry : this . jobTrackerConfigs . entrySet ( ) ) { entry . getValue ( ) . setName ( entry . getKey ( ) ) ; } return this ;