signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class TargetsApi { /** * Acknowledge missed calls * Acknowledge missed calls in the list of recent targets . * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > ackRecentMissedCallsWithHttpInfo ( ) throws ApiException { } }
com . squareup . okhttp . Call call = ackRecentMissedCallsValidateBeforeCall ( null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class ModelsImpl { /** * Create an entity role for an entity in the application . * @ param appId The application ID . * @ param versionId The version ID . * @ param entityId The entity model ID . * @ param createPrebuiltEntityRoleOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the UUID object */ public Observable < UUID > createPrebuiltEntityRoleAsync ( UUID appId , String versionId , UUID entityId , CreatePrebuiltEntityRoleOptionalParameter createPrebuiltEntityRoleOptionalParameter ) { } }
return createPrebuiltEntityRoleWithServiceResponseAsync ( appId , versionId , entityId , createPrebuiltEntityRoleOptionalParameter ) . map ( new Func1 < ServiceResponse < UUID > , UUID > ( ) { @ Override public UUID call ( ServiceResponse < UUID > response ) { return response . body ( ) ; } } ) ;
public class XBELValidatorServiceImpl {

    /**
     * {@inheritDoc}
     *
     * Validates the supplied XBEL document string. SAX parse failures are
     * converted into {@link ValidationError}s carrying the system id and the
     * line/column of the offending location.
     */
    @Override
    public void validate(String s) throws ValidationError {
        try {
            xv.validate(s);
        } catch (SAXParseException e) {
            // Preserve the parser's location info so callers can point at the exact spot.
            final String name = e.getSystemId();
            final String msg = e.getMessage();
            final int line = e.getLineNumber();
            final int column = e.getColumnNumber();
            throw new ValidationError(name, msg, e, line, column);
        } catch (SAXException e) {
            // TODO This isn't the intended design here: non-parse SAX failures
            // are wrapped unchecked instead of being surfaced as ValidationError.
            throw new RuntimeException(e);
        } catch (IOException e) {
            // TODO This isn't the intended design here (same concern as above for I/O failures).
            throw new RuntimeException(e);
        }
    }
}
public class AllocationRecorder { /** * Adds a { @ link Sampler } that will get run < b > every time an allocation is performed from Java * code < / b > . Use this with < b > extreme < / b > judiciousness ! * @ param sampler The sampler to add . */ public static void addSampler ( Sampler sampler ) { } }
synchronized ( samplerLock ) { Sampler [ ] samplers = additionalSamplers ; /* create a new list of samplers from the old , adding this sampler */ if ( samplers != null ) { Sampler [ ] newSamplers = new Sampler [ samplers . length + 1 ] ; System . arraycopy ( samplers , 0 , newSamplers , 0 , samplers . length ) ; newSamplers [ samplers . length ] = sampler ; additionalSamplers = newSamplers ; } else { Sampler [ ] newSamplers = new Sampler [ 1 ] ; newSamplers [ 0 ] = sampler ; additionalSamplers = newSamplers ; } }
public class CompareUtil {

    /**
     * Null-safe comparison of two objects.
     *
     * @param <T> type of the compared objects (must implement Comparable)
     * @param c1 first object, may be {@code null}
     * @param c2 second object, may be {@code null}
     * @param isNullGreater whether a {@code null} value sorts after non-null values
     * @return a negative number if c1 &lt; c2, zero if c1 == c2, a positive number if c1 &gt; c2
     * @see java.util.Comparator#compare(Object, Object)
     */
    public static <T extends Comparable<? super T>> int compare(T c1, T c2, boolean isNullGreater) {
        // Covers both-null and identical references in one check.
        if (c1 == c2) {
            return 0;
        }
        if (c1 == null || c2 == null) {
            // Exactly one side is null; its rank relative to the non-null side
            // is decided by isNullGreater.
            int nullRank = isNullGreater ? 1 : -1;
            return (c1 == null) ? nullRank : -nullRank;
        }
        return c1.compareTo(c2);
    }
}
public class Counters { /** * Transform log space values into a probability distribution in place . On the * assumption that the values in the Counter are in log space , this method * calculates their sum , and then subtracts the log of their sum from each * element . That is , if a counter has keys c1 , c2 , c3 with values v1 , v2 , v3, * the value of c1 becomes v1 - log ( e ^ v1 + e ^ v2 + e ^ v3 ) . After this , e ^ v1 + * e ^ v2 + e ^ v3 = 1.0 , so Counters . logSum ( c ) = 0.0 ( approximately ) . * @ param c * The Counter to log normalize in place */ public static < E > void logNormalizeInPlace ( Counter < E > c ) { } }
double logsum = logSum ( c ) ; // for ( E key : c . keySet ( ) ) { // c . incrementCount ( key , - logsum ) ; // This should be faster for ( Map . Entry < E , Double > e : c . entrySet ( ) ) { e . setValue ( e . getValue ( ) . doubleValue ( ) - logsum ) ; }
public class VehicleManager {

    /**
     * Register to receive a callback when a message with the given key is
     * received.
     *
     * @param key the key you want to receive updates for
     * @param listener a listener instance to receive the callback
     */
    public void addListener(MessageKey key, VehicleMessage.Listener listener) {
        // Wrap the key in an exact matcher and delegate to the matcher-based overload.
        addListener(ExactKeyMatcher.buildExactMatcher(key), listener);
    }
}
public class XPathParser { /** * Consume an expected token , throwing an exception if it * isn ' t there . * @ param expected The string to be expected . * @ throws javax . xml . transform . TransformerException */ private final void consumeExpected ( String expected ) throws javax . xml . transform . TransformerException { } }
if ( tokenIs ( expected ) ) { nextToken ( ) ; } else { error ( XPATHErrorResources . ER_EXPECTED_BUT_FOUND , new Object [ ] { expected , m_token } ) ; // " Expected " + expected + " , but found : " + m _ token ) ; // Patch for Christina ' s gripe . She wants her errorHandler to return from // this error and continue trying to parse , rather than throwing an exception . // Without the patch , that put us into an endless loop . throw new XPathProcessorException ( CONTINUE_AFTER_FATAL_ERROR ) ; }
public class FeatureServiceImpl {

    /**
     * Retrieves the current sprint's detail for a given team.
     *
     * @param componentId the ID of the related UI component that will reference
     *        collector item content from this collector
     * @param teamId a given scope-owner's source-system ID
     * @param projectId the project identifier scoping the sprint lookup
     * @param agileType optional agile methodology type; passed through as null when absent
     * @return a data response list of type Feature containing several relevant
     *         sprint fields for the current team's sprint
     */
    @Override
    public DataResponse<List<Feature>> getCurrentSprintDetail(ObjectId componentId, String teamId, String projectId, Optional<String> agileType) {
        Component component = componentRepository.findOne(componentId);
        // Return an empty legacy response when the component is missing or it has
        // no usable AgileTool collector item.
        if ((component == null) || CollectionUtils.isEmpty(component.getCollectorItems()) || CollectionUtils.isEmpty(component.getCollectorItems().get(CollectorType.AgileTool)) || (component.getCollectorItems().get(CollectorType.AgileTool).get(0) == null)) {
            return getEmptyLegacyDataResponse();
        }
        CollectorItem item = component.getCollectorItems().get(CollectorType.AgileTool).get(0);
        // Get teamId first from available collector item, based on component
        List<Feature> sprintResponse = getFeaturesForCurrentSprints(teamId, projectId, item.getCollectorId(), agileType.isPresent() ? agileType.get() : null, true);
        // Stamp the response with the collector's last execution time.
        Collector collector = collectorRepository.findOne(item.getCollectorId());
        return new DataResponse<>(sprintResponse, collector.getLastExecuted());
    }
}
public class NestedParser {

    /**
     * Recursively splits the input by each operator in turn, producing a nested
     * list structure that mirrors operator precedence (the first operator binds
     * loosest; the last binds tightest).
     *
     * Single-character operators split the input flat; two-character operators
     * are treated as start/end pair delimiters and scanned for balanced regions.
     *
     * TODO create a version that allows for multi character operators
     *
     * @param input the text to parse
     * @param operators the operators to apply, outermost first; each must be 1 or 2 characters
     * @return a list of strings and nested lists reflecting the split structure
     * @throws IllegalArgumentException if an operator is longer than two characters
     */
    public List<Object> parse(final String input, final List<String> operators) {
        final List<Object> result = new ArrayList<Object>();
        if (operators.size() != 0) {
            // The innermost level uses a different (inner) mini parser.
            final boolean innerLoop = operators.size() == 1;
            final MiniParser currentParser = innerLoop ? INNER_MINI_PARSER : MINI_PARSER;
            final String operator = operators.get(0);
            final List<String> segments;
            if (operator.length() == 1) {
                segments = currentParser.split(input, operator.charAt(0));
            } else if (operator.length() == 2) {
                // Two characters act as a start/end delimiter pair; scan for balanced regions.
                List<String> allSegments = currentParser.scan(input, String.valueOf(operator.charAt(0)), String.valueOf(operator.charAt(1)), true);
                if (allSegments.size() > 0) {
                    // the first part is not processed any further
                    result.add(allSegments.get(0));
                }
                if (allSegments.size() > 1) {
                    segments = allSegments.subList(1, allSegments.size());
                } else {
                    segments = Collections.emptyList();
                }
            } else {
                throw new IllegalArgumentException("Operators must either be start/end pairs or single characters");
            }
            if (innerLoop) {
                // Last operator: segments are leaves, no further recursion.
                result.addAll(segments);
            } else {
                // Recurse with the remaining (tighter-binding) operators.
                for (String segment : segments) {
                    List<Object> parse = parse(segment, operators.subList(1, operators.size()));
                    // Collapse single-string results to avoid needless one-element nesting.
                    if (parse.size() == 1 && parse.get(0) instanceof String) {
                        result.add(parse.get(0));
                    } else {
                        result.add(parse);
                    }
                }
            }
        }
        return result;
    }
}
public class CRFClassifier { /** * Used to get the default supplied classifier inside the jar file . THIS * FUNCTION WILL ONLY WORK IF THE CODE WAS LOADED FROM A JAR FILE WHICH HAS A * SERIALIZED CLASSIFIER STORED INSIDE IT . * @ return The default CRFClassifier in the jar file ( if there is one ) */ public static < IN extends CoreMap > CRFClassifier < IN > getDefaultClassifier ( ) { } }
CRFClassifier < IN > crf = new CRFClassifier < IN > ( ) ; crf . loadDefaultClassifier ( ) ; return crf ;
public class JobSchedulesImpl {

    /**
     * Updates the properties of the specified job schedule.
     * This fully replaces all the updatable properties of the job schedule. For
     * example, if the schedule property is not specified with this request, then
     * the Batch service will remove the existing schedule. Changes to a job
     * schedule only impact jobs created by the schedule after the update has
     * taken place; currently running jobs are unaffected.
     *
     * @param jobScheduleId the ID of the job schedule to update
     * @param jobScheduleUpdateParameter the parameters for the request
     * @param jobScheduleUpdateOptions additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws BatchErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void update(String jobScheduleId, JobScheduleUpdateParameter jobScheduleUpdateParameter, JobScheduleUpdateOptions jobScheduleUpdateOptions) {
        // Synchronous wrapper: run the async call and block until it completes.
        // body() is discarded — the operation returns no payload; single() still
        // propagates any service error raised by the observable.
        updateWithServiceResponseAsync(jobScheduleId, jobScheduleUpdateParameter, jobScheduleUpdateOptions).toBlocking().single().body();
    }
}
public class OEntityManager { /** * Sets the received handler as default and merges the classes all together . * @ param iClassHandler */ public synchronized void setClassHandler ( final OEntityManagerClassHandler iClassHandler ) { } }
for ( Entry < String , Class < ? > > entry : classHandler . getClassesEntrySet ( ) ) { iClassHandler . registerEntityClass ( entry . getValue ( ) ) ; } this . classHandler = iClassHandler ;
public class BuildController { /** * Creates a link between a build and another * @ param buildId From this build . . . * @ param targetBuildId . . . to this build * @ return List of builds */ @ RequestMapping ( value = "builds/{buildId}/links/{targetBuildId}" , method = RequestMethod . PUT ) public Build addBuildLink ( @ PathVariable ID buildId , @ PathVariable ID targetBuildId ) { } }
Build build = structureService . getBuild ( buildId ) ; Build targetBuild = structureService . getBuild ( targetBuildId ) ; structureService . addBuildLink ( build , targetBuild ) ; return build ;
public class UserSettingsImpl {

    /**
     * Returns the model-compare plugin configurations stored under the
     * USER_SETTINGS__MODEL_COMPARES feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<ModelComparePluginConfiguration> getModelCompares() {
        // EMF reflective accessor; the boolean resolves proxies before returning.
        return (EList<ModelComparePluginConfiguration>) eGet(StorePackage.Literals.USER_SETTINGS__MODEL_COMPARES, true);
    }
}
public class Messages { /** * Optionally formats a message for the requested language with * { @ link java . text . MessageFormat } . * @ param message * @ param language * @ param args * @ return the message */ private String formatMessage ( String message , String language , Object ... args ) { } }
if ( args != null && args . length > 0 ) { // only format a message if we have arguments Locale locale = languages . getLocaleOrDefault ( language ) ; MessageFormat messageFormat = new MessageFormat ( message , locale ) ; return messageFormat . format ( args ) ; } return message ;
public class UntagLogGroupRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param untagLogGroupRequest the request to marshal; must not be null
     * @param protocolMarshaller the marshaller that receives each bound field
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(UntagLogGroupRequest untagLogGroupRequest, ProtocolMarshaller protocolMarshaller) {
        if (untagLogGroupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field against its protocol binding.
            protocolMarshaller.marshall(untagLogGroupRequest.getLogGroupName(), LOGGROUPNAME_BINDING);
            protocolMarshaller.marshall(untagLogGroupRequest.getTags(), TAGS_BINDING);
        } catch (Exception e) {
            // Normalize any marshalling failure into the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GroupsApi {

    /**
     * Gets a list of users in a group.
     * Retrieves a list of users in a group.
     *
     * @param accountId the external account number (int) or account ID Guid (required)
     * @param groupId the ID of the group being accessed (required)
     * @return UsersResponse
     * @throws ApiException if the API call fails
     */
    public UsersResponse listGroupUsers(String accountId, String groupId) throws ApiException {
        // Delegate to the options-aware overload with default (null) options.
        return listGroupUsers(accountId, groupId, null);
    }
}
public class ObjectFactory {

    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link Object}{@code >}.
     *
     * @param value Java instance representing the xml element's value
     * @return the new instance of {@link JAXBElement}{@code <}{@link Object}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/citygml/relief/2.0", name = "_GenericApplicationPropertyOfRasterRelief")
    public JAXBElement<Object> create_GenericApplicationPropertyOfRasterRelief(Object value) {
        // Third argument (scope) is null: this is a global element, not class-scoped.
        return new JAXBElement<Object>(__GenericApplicationPropertyOfRasterRelief_QNAME, Object.class, null, value);
    }
}
public class ns_conf_download_policy {

    /**
     * <pre>
     * Converts API response of bulk operation into object and returns the object array in case of get request.
     * </pre>
     *
     * @param service the nitro service whose payload formatter deserializes the response
     * @param response the raw response string from the appliance
     * @return one ns_conf_download_policy per response entry
     * @throws Exception if deserialization fails or the response carries a non-zero error code
     */
    protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception {
        ns_conf_download_policy_responses result = (ns_conf_download_policy_responses) service.get_payload_formatter().string_to_resource(ns_conf_download_policy_responses.class, response);
        if (result.errorcode != 0) {
            // An expired session is cleared locally before the error is surfaced.
            if (result.errorcode == SESSION_NOT_EXISTS)
                service.clear_session();
            throw new nitro_exception(result.message, result.errorcode, (base_response[]) result.ns_conf_download_policy_response_array);
        }
        // Flatten the bulk envelope: take the first policy from each per-entry response.
        ns_conf_download_policy[] result_ns_conf_download_policy = new ns_conf_download_policy[result.ns_conf_download_policy_response_array.length];
        for (int i = 0; i < result.ns_conf_download_policy_response_array.length; i++) {
            result_ns_conf_download_policy[i] = result.ns_conf_download_policy_response_array[i].ns_conf_download_policy[0];
        }
        return result_ns_conf_download_policy;
    }
}
public class ComponentRegister { /** * Get instant run dex path , used to catch the branch usingApkSplits = false . */ private static List < String > loadInstantRunDexFile ( ApplicationInfo appInfo ) { } }
List < String > instantRunDexPaths = new ArrayList < > ( ) ; if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . LOLLIPOP && appInfo . splitSourceDirs != null ) { instantRunDexPaths . addAll ( Arrays . asList ( appInfo . splitSourceDirs ) ) ; Log . i ( AndServer . TAG , "InstantRun support was found." ) ; } else { try { // Reflect instant run sdk to find where is the dex file . Class pathsByInstantRun = Class . forName ( "com.android.tools.fd.runtime.Paths" ) ; Method getDexFileDirectory = pathsByInstantRun . getMethod ( "getDexFileDirectory" , String . class ) ; String dexDirectory = ( String ) getDexFileDirectory . invoke ( null , appInfo . packageName ) ; File dexFolder = new File ( dexDirectory ) ; if ( dexFolder . exists ( ) && dexFolder . isDirectory ( ) ) { File [ ] dexFiles = dexFolder . listFiles ( ) ; for ( File file : dexFiles ) { if ( file . exists ( ) && file . isFile ( ) && file . getName ( ) . endsWith ( ".dex" ) ) { instantRunDexPaths . add ( file . getAbsolutePath ( ) ) ; } } Log . i ( AndServer . TAG , "InstantRun support was found." ) ; } } catch ( ClassNotFoundException e ) { Log . i ( AndServer . TAG , "InstantRun support was not found." ) ; } catch ( Exception e ) { Log . w ( AndServer . TAG , "Finding InstantRun failed." , e ) ; } } return instantRunDexPaths ;
public class AbstractWComponent {

    /**
     * Create and return an error diagnostic associated to the given error source.
     *
     * @param source the source of the error
     * @param message the error message, using {@link MessageFormat} syntax
     * @param args optional arguments for the message
     * @return an error diagnostic for this component
     */
    protected Diagnostic createErrorDiagnostic(final WComponent source, final String message, final Serializable... args) {
        // Severity is fixed at ERROR; message formatting is deferred to DiagnosticImpl.
        return new DiagnosticImpl(Diagnostic.ERROR, source, message, args);
    }
}
public class Nd4jBlas { /** * Returns the BLAS library vendor * @ return the BLAS library vendor */ @ Override public Vendor getBlasVendor ( ) { } }
int vendor = getBlasVendorId ( ) ; boolean isUnknowVendor = ( ( vendor > Vendor . values ( ) . length - 1 ) || ( vendor <= 0 ) ) ; if ( isUnknowVendor ) { return Vendor . UNKNOWN ; } return Vendor . values ( ) [ vendor ] ;
public class CmsJlanNetworkFile { /** * Gets the file information record . < p > * @ return the file information for this file * @ throws IOException if reading the file information fails */ public FileInfo getFileInfo ( ) throws IOException { } }
try { load ( false ) ; if ( m_resource . isFile ( ) ) { // Fill in a file information object for this file / directory long flen = m_resource . getLength ( ) ; // long alloc = ( flen + 512L ) & 0xFFFFFE00L ; long alloc = flen ; int fattr = 0 ; if ( m_cms . getRequestContext ( ) . getCurrentProject ( ) . isOnlineProject ( ) ) { fattr += FileAttribute . ReadOnly ; } // Create the file information FileInfo finfo = new FileInfo ( m_resource . getName ( ) , flen , fattr ) ; long fdate = m_resource . getDateLastModified ( ) ; finfo . setModifyDateTime ( fdate ) ; finfo . setAllocationSize ( alloc ) ; finfo . setFileId ( m_resource . getStructureId ( ) . hashCode ( ) ) ; finfo . setCreationDateTime ( m_resource . getDateCreated ( ) ) ; finfo . setChangeDateTime ( fdate ) ; return finfo ; } else { // Fill in a file information object for this directory int fattr = FileAttribute . Directory ; if ( m_cms . getRequestContext ( ) . getCurrentProject ( ) . isOnlineProject ( ) ) { fattr += FileAttribute . ReadOnly ; } // Can ' t use negative file size here , since this stops Windows 7 from connecting FileInfo finfo = new FileInfo ( m_resource . getName ( ) , 1 , fattr ) ; long fdate = m_resource . getDateLastModified ( ) ; finfo . setModifyDateTime ( fdate ) ; finfo . setAllocationSize ( 1 ) ; finfo . setFileId ( m_resource . getStructureId ( ) . hashCode ( ) ) ; finfo . setCreationDateTime ( m_resource . getDateCreated ( ) ) ; finfo . setChangeDateTime ( fdate ) ; return finfo ; } } catch ( CmsException e ) { throw CmsJlanDiskInterface . convertCmsException ( e ) ; }
public class Router {

    /**
     * Specify a middleware that will be called for all HTTP methods.
     *
     * @param pattern the simple pattern
     * @param handler the middleware to call
     * @return this router, for chaining
     */
    public Router all(@NotNull final String pattern, @NotNull final Middleware... handler) {
        // Register the same handler set against every HTTP verb this router supports.
        get(pattern, handler);
        put(pattern, handler);
        post(pattern, handler);
        delete(pattern, handler);
        options(pattern, handler);
        head(pattern, handler);
        trace(pattern, handler);
        connect(pattern, handler);
        patch(pattern, handler);
        return this;
    }
}
public class ClientFactory { /** * Creates a transfer message sender asynchronously . This sender sends message to destination entity via another entity . * This is mainly to be used when sending messages in a transaction . * When messages need to be sent across entities in a single transaction , this can be used to ensure * all the messages land initially in the same entity / partition for local transactions , and then * let service bus handle transferring the message to the actual destination . * @ param messagingFactory messaging factory ( which represents a connection ) on which sender needs to be created . * @ param entityPath path of the final destination of the message . * @ param viaEntityPath The initial destination of the message . * @ return a CompletableFuture representing the pending creating of IMessageSender instance . */ public static CompletableFuture < IMessageSender > createTransferMessageSenderFromEntityPathAsync ( MessagingFactory messagingFactory , String entityPath , String viaEntityPath ) { } }
Utils . assertNonNull ( "messagingFactory" , messagingFactory ) ; MessageSender sender = new MessageSender ( messagingFactory , viaEntityPath , entityPath , null ) ; return sender . initializeAsync ( ) . thenApply ( ( v ) -> sender ) ;
public class HazelcastPortableMessageFormatter {

    /**
     * Appends the "empty list" default write for a list field to the generated
     * hazelcast portable writer, e.g.:
     * <pre>
     * {@code
     * portableWriter.writeBooleanArray("booleanValues", new boolean[0]);
     * }
     * </pre>
     *
     * @param field the JField being written
     * @param descriptor the descriptor of the list's element type
     * @throws GeneratorException if the element type has no portable array mapping
     */
    private void writeDefaultPortableFieldList(JField field, PDescriptor descriptor) throws GeneratorException {
        // Each case emits one writer call with an empty array of the matching component type.
        switch (descriptor.getType()) {
            case BYTE:
                writer.formatln("%s.writeByteArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), helper.getValueType(descriptor));
                break;
            case BINARY:
                // Hardcode "byte": the mapped value type would be binary, which doesn't fit a byte array.
                writer.formatln("%s.writeByteArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), "byte");
                break;
            case BOOL:
                writer.formatln("%s.writeBooleanArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), helper.getValueType(descriptor));
                break;
            case DOUBLE:
                writer.formatln("%s.writeDoubleArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), helper.getValueType(descriptor));
                break;
            case ENUM:
                // Enums are serialized by ordinal int; TODO: needs fixing, the enum value itself isn't doable.
                writer.formatln("%s.writeIntArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), int.class.getName());
                break;
            case I16:
                writer.formatln("%s.writeShortArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), helper.getValueType(descriptor));
                break;
            case I32:
                writer.formatln("%s.writeIntArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), helper.getValueType(descriptor));
                break;
            case I64:
                writer.formatln("%s.writeLongArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), helper.getValueType(descriptor));
                break;
            case STRING:
                writer.formatln("%s.writeUTFArray(\"%s\", new %s[0]);", PORTABLE_WRITER, field.name(), helper.getValueType(descriptor));
                break;
            case MESSAGE:
                // Nested messages are written as portable builder arrays.
                writer.formatln("%s.writePortableArray(\"%s\", new %s._Builder[0]);", PORTABLE_WRITER, field.name(), helper.getValueType(descriptor));
                break;
            default:
                throw new GeneratorException("Not implemented writeDefaultPortableFieldList for list with type: " + descriptor.getType() + " in " + this.getClass().getSimpleName());
        }
    }
}
public class MeshGenerator {

    /**
     * Generates a plane on xy. The center is at the middle of the plane.
     *
     * @param size the size of the plane to generate, on x and y
     * @return the vertex data
     */
    public static VertexData generatePlane(Vector2f size) {
        final VertexData destination = new VertexData();
        // Attribute slots are fixed: 0 = positions, 1 = normals, 2 = texture coords.
        final VertexAttribute positionsAttribute = new VertexAttribute("positions", DataType.FLOAT, 3);
        destination.addAttribute(0, positionsAttribute);
        final TFloatList positions = new TFloatArrayList();
        final VertexAttribute normalsAttribute = new VertexAttribute("normals", DataType.FLOAT, 3);
        destination.addAttribute(1, normalsAttribute);
        final TFloatList normals = new TFloatArrayList();
        final VertexAttribute textureCoordsAttribute = new VertexAttribute("textureCoords", DataType.FLOAT, 2);
        destination.addAttribute(2, textureCoordsAttribute);
        final TFloatList textureCoords = new TFloatArrayList();
        final TIntList indices = destination.getIndices();
        // Generate the mesh
        generatePlane(positions, normals, textureCoords, indices, size);
        // Put the mesh in the vertex data
        positionsAttribute.setData(positions);
        normalsAttribute.setData(normals);
        textureCoordsAttribute.setData(textureCoords);
        return destination;
    }
}
public class DruidQuery {

    /**
     * Return this query as a Timeseries query, or null if this query is not
     * compatible with Timeseries (having filter present, more than one
     * dimension, a non-time single dimension, or an order-by on a non-time
     * column).
     *
     * @return the equivalent Timeseries query, or null when not convertible
     */
    @Nullable
    public TimeseriesQuery toTimeseriesQuery() {
        // Timeseries has no HAVING support.
        if (grouping == null || grouping.getHavingFilter() != null) {
            return null;
        }
        final Granularity queryGranularity;
        final boolean descending;
        int timeseriesLimit = 0;
        if (grouping.getDimensions().isEmpty()) {
            queryGranularity = Granularities.ALL;
            descending = false;
        } else if (grouping.getDimensions().size() == 1) {
            final DimensionExpression dimensionExpression = Iterables.getOnlyElement(grouping.getDimensions());
            queryGranularity = Expressions.toQueryGranularity(dimensionExpression.getDruidExpression(), plannerContext.getExprMacroTable());
            if (queryGranularity == null) {
                // Timeseries only applies if the single dimension is granular __time.
                return null;
            }
            if (limitSpec != null) {
                // If there is a limit spec, set timeseriesLimit to given value if less than Integer.MAX_VALUE
                if (limitSpec.isLimited()) {
                    timeseriesLimit = limitSpec.getLimit();
                }
                if (limitSpec.getColumns().isEmpty()) {
                    descending = false;
                } else {
                    // We're ok if the first order by is time (since every time value is distinct, the rest of the columns
                    // wouldn't matter anyway).
                    final OrderByColumnSpec firstOrderBy = limitSpec.getColumns().get(0);
                    if (firstOrderBy.getDimension().equals(dimensionExpression.getOutputName())) {
                        // Order by time.
                        descending = firstOrderBy.getDirection() == OrderByColumnSpec.Direction.DESCENDING;
                    } else {
                        // Order by something else.
                        return null;
                    }
                }
            } else {
                // No limitSpec.
                descending = false;
            }
        } else {
            // More than one dimension, timeseries cannot handle.
            return null;
        }
        final Filtration filtration = Filtration.create(filter).optimize(sourceQuerySignature);
        // Post-aggregators from the sort projection (if any) ride along with the grouping's.
        final List<PostAggregator> postAggregators = new ArrayList<>(grouping.getPostAggregators());
        if (sortProject != null) {
            postAggregators.addAll(sortProject.getPostAggregators());
        }
        // skipEmptyBuckets keeps result rows aligned with what the SQL layer expects;
        // user-supplied context values override it.
        final Map<String, Object> theContext = new HashMap<>();
        theContext.put("skipEmptyBuckets", true);
        theContext.putAll(plannerContext.getQueryContext());
        return new TimeseriesQuery(dataSource, filtration.getQuerySegmentSpec(), descending, getVirtualColumns(false), filtration.getDimFilter(), queryGranularity, grouping.getAggregatorFactories(), postAggregators, timeseriesLimit, ImmutableSortedMap.copyOf(theContext));
    }
}
public class BufferedRandomAccessFile {

    /**
     * Write at most "len" bytes to "b" starting at position "off", and return
     * the number of bytes written.
     *
     * Writes go into the in-memory buffer (buff_); the write is clipped to the
     * space remaining up to hi_, so callers must loop to write everything.
     */
    private int writeAtMost(byte[] b, int off, int len) throws IOException {
        if (this.curr_ >= this.hi_) {
            if (this.hitEOF_ && this.hi_ < this.maxHi_) {
                // at EOF -- bump "hi"
                this.hi_ = this.maxHi_;
            } else {
                // slow path -- write current buffer; read next one
                this.seek(this.curr_);
                if (this.curr_ == this.hi_) {
                    // appending to EOF -- bump "hi"
                    this.hi_ = this.maxHi_;
                }
            }
        }
        // Clip the write to what fits before hi_; the cast is safe because
        // hi_ - curr_ is bounded by the buffer window.
        len = Math.min(len, (int) (this.hi_ - this.curr_));
        // buffOff is the position within the buffer corresponding to curr_.
        int buffOff = (int) (this.curr_ - this.lo_);
        System.arraycopy(b, off, this.buff_, buffOff, len);
        this.curr_ += len;
        return len;
    }
}
public class ListPipelinesResult { /** * The pipeline identifiers . If you require additional information about the pipelines , you can use these * identifiers to call < a > DescribePipelines < / a > and < a > GetPipelineDefinition < / a > . * @ param pipelineIdList * The pipeline identifiers . If you require additional information about the pipelines , you can use these * identifiers to call < a > DescribePipelines < / a > and < a > GetPipelineDefinition < / a > . */ public void setPipelineIdList ( java . util . Collection < PipelineIdName > pipelineIdList ) { } }
if ( pipelineIdList == null ) { this . pipelineIdList = null ; return ; } this . pipelineIdList = new com . amazonaws . internal . SdkInternalList < PipelineIdName > ( pipelineIdList ) ;
public class Encdec { /** * Encode floats */ public static int enc_floatle ( float f , byte [ ] dst , int di ) { } }
return enc_uint32le ( Float . floatToIntBits ( f ) , dst , di ) ;
public class HttpTemplate {

    /**
     * Reads an InputStream as a String allowing for different encoding types.
     * This closes the stream at the end.
     *
     * @param inputStream the input stream; may be null, in which case null is returned
     * @param connection the HTTP connection object (used for encoding / gzip detection)
     * @return a String representation of the input stream, or null for a null stream
     * @throws IOException if something goes wrong
     */
    String readInputStreamAsEncodedString(InputStream inputStream, HttpURLConnection connection) throws IOException {
        if (inputStream == null) {
            return null;
        }
        BufferedReader reader = null;
        try {
            String responseEncoding = getResponseEncoding(connection);
            // Transparently unwrap gzip-compressed bodies before decoding.
            if (izGzipped(connection)) {
                inputStream = new GZIPInputStream(inputStream);
            }
            // Use the server-declared charset when present, otherwise UTF-8.
            final InputStreamReader in = responseEncoding != null ? new InputStreamReader(inputStream, responseEncoding) : new InputStreamReader(inputStream, CHARSET_UTF_8);
            reader = new BufferedReader(in);
            StringBuilder sb = new StringBuilder();
            // NOTE: readLine() strips line terminators, so they are not preserved in the result.
            for (String line; (line = reader.readLine()) != null; ) {
                sb.append(line);
            }
            return sb.toString();
        } finally {
            // Close the (possibly gzip-wrapped) stream first, then the reader;
            // the reader close is best-effort.
            inputStream.close();
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignore) {
                }
            }
        }
    }
}
public class SemEvalCorpusReader { /** * { @ inheritDoc } */ public Iterator < Document > read ( File file ) { } }
try { return read ( new FileReader ( file ) ) ; } catch ( FileNotFoundException fnfe ) { throw new IOError ( fnfe ) ; }
public class CmsSecurityManager { /** * Creates a new resource with the provided content and properties . < p > * An exception is thrown if a resource with the given name already exists . < p > * @ param context the current request context * @ param resourcePath the name of the resource to create ( full path ) * @ param resource the new resource to create * @ param content the content for the new resource * @ param properties the properties for the new resource * @ return the created resource * @ throws CmsVfsResourceAlreadyExistsException if a resource with the given name already exists * @ throws CmsVfsException if the project in the given database context is the " Online " project * @ throws CmsException if something goes wrong */ public CmsResource createResource ( CmsRequestContext context , String resourcePath , CmsResource resource , byte [ ] content , List < CmsProperty > properties ) throws CmsVfsResourceAlreadyExistsException , CmsVfsException , CmsException { } }
if ( existsResource ( context , resourcePath , CmsResourceFilter . IGNORE_EXPIRATION ) ) { // check if the resource already exists by name throw new CmsVfsResourceAlreadyExistsException ( org . opencms . db . generic . Messages . get ( ) . container ( org . opencms . db . generic . Messages . ERR_RESOURCE_WITH_NAME_ALREADY_EXISTS_1 , resource . getRootPath ( ) ) ) ; } CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; CmsResource newResource = null ; try { checkOfflineProject ( dbc ) ; newResource = m_driverManager . createResource ( dbc , resourcePath , resource , content , properties , false ) ; } catch ( Exception e ) { dbc . report ( null , Messages . get ( ) . container ( Messages . ERR_IMPORT_RESOURCE_2 , context . getSitePath ( resource ) , resourcePath ) , e ) ; } finally { dbc . clear ( ) ; } return newResource ;
public class ResourceLoader { /** * Gets all resources with given name at the given source URL . If the URL points to a directory , the name is the * file path relative to this directory . If the URL points to a JAR file , the name identifies an entry in that JAR * file . If the URL points to a JAR file , the resource is not found in that JAR file , and the JAR file has * Class - Path attribute , the JAR files identified in the Class - Path are also searched for the resource . * The search is lazy , that is , " find next resource " operation is triggered by calling * { @ link Enumeration # hasMoreElements } . * @ param source the source URL * @ param name the resource name * @ return enumeration of resource handles representing the resources */ public Enumeration < ResourceHandle > getResources ( URL source , String name ) { } }
return new ResourceEnumeration < > ( new URL [ ] { source } , name , false ) ;
public class CDownloadRequest { /** * If no progress listener has been set , return the byte sink defined in constructor , otherwise decorate it . * @ return the byte sink to be used for download operations . */ public ByteSink getByteSink ( ) { } }
ByteSink bs = byteSink ; if ( progressListener != null ) { bs = new ProgressByteSink ( bs , progressListener ) ; } return bs ;
public class MetaDataTypeImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
switch ( featureID ) { case DroolsPackage . META_DATA_TYPE__META_VALUE : return getMetaValue ( ) ; case DroolsPackage . META_DATA_TYPE__NAME : return getName ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
public class AWSStorageGatewayClient { /** * Deletes Challenge - Handshake Authentication Protocol ( CHAP ) credentials for a specified iSCSI target and initiator * pair . * @ param deleteChapCredentialsRequest * A JSON object containing one or more of the following fields : < / p > * < ul > * < li > * < a > DeleteChapCredentialsInput $ InitiatorName < / a > * < / li > * < li > * < a > DeleteChapCredentialsInput $ TargetARN < / a > * < / li > * @ return Result of the DeleteChapCredentials operation returned by the service . * @ throws InvalidGatewayRequestException * An exception occurred because an invalid gateway request was issued to the service . For more information , * see the error and message fields . * @ throws InternalServerErrorException * An internal server error has occurred during the request . For more information , see the error and message * fields . * @ sample AWSStorageGateway . DeleteChapCredentials * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / storagegateway - 2013-06-30 / DeleteChapCredentials " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteChapCredentialsResult deleteChapCredentials ( DeleteChapCredentialsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteChapCredentials ( request ) ;
public class LTriBoolFunctionBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */ @ Nonnull public static < R > LTriBoolFunctionBuilder < R > triBoolFunction ( Consumer < LTriBoolFunction < R > > consumer ) { } }
return new LTriBoolFunctionBuilder ( consumer ) ;
public class EpiLur { /** * / * ( non - Javadoc ) * @ see com . att . cadi . Lur # clear ( java . security . Principal , java . lang . StringBuilder ) */ @ Override public void clear ( Principal p , StringBuilder report ) { } }
for ( Lur lur : lurs ) { lur . clear ( p , report ) ; }
public class ContinuedFraction { /** * Uses Thompson and Barnett ' s modified Lentz ' s algorithm create an * approximation that should be accurate to full precision . * @ param args the numeric inputs to the continued fraction * @ return the approximate value of the continued fraction */ public double lentz ( double ... args ) { } }
double f_n = getB ( 0 , args ) ; if ( f_n == 0.0 ) f_n = 1e-30 ; double c_n , c_0 = f_n ; double d_n , d_0 = 0 ; double delta = 0 ; int j = 0 ; while ( Math . abs ( delta - 1 ) > 1e-15 ) { j ++ ; d_n = getB ( j , args ) + getA ( j , args ) * d_0 ; if ( d_n == 0.0 ) d_n = 1e-30 ; c_n = getB ( j , args ) + getA ( j , args ) / c_0 ; if ( c_n == 0.0 ) c_n = 1e-30 ; d_n = 1 / d_n ; delta = c_n * d_n ; f_n *= delta ; d_0 = d_n ; c_0 = c_n ; } return f_n ;
public class LObjIntByteFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T , R > LObjIntByteFunction < T , R > objIntByteFunctionFrom ( Consumer < LObjIntByteFunctionBuilder < T , R > > buildingFunction ) { } }
LObjIntByteFunctionBuilder builder = new LObjIntByteFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class log { /** * Private Logger function to handle Log calls * @ param level level of the log message * @ param message log output * @ param throwable */ private static int logger ( int level , String message , Throwable throwable ) { } }
if ( QuickUtils . shouldShowLogs ( ) ) { switch ( level ) { case QuickUtils . DEBUG : return android . util . Log . d ( QuickUtils . TAG , message , throwable ) ; case QuickUtils . VERBOSE : return android . util . Log . v ( QuickUtils . TAG , message , throwable ) ; case QuickUtils . INFO : return android . util . Log . i ( QuickUtils . TAG , message , throwable ) ; case QuickUtils . WARN : return android . util . Log . w ( QuickUtils . TAG , message , throwable ) ; case QuickUtils . ERROR : return android . util . Log . e ( QuickUtils . TAG , message , throwable ) ; default : break ; } } return - 1 ;
public class EntityTypesClient { /** * Returns the list of all entity types in the specified agent . * < p > Sample code : * < pre > < code > * try ( EntityTypesClient entityTypesClient = EntityTypesClient . create ( ) ) { * ProjectAgentName parent = ProjectAgentName . of ( " [ PROJECT ] " ) ; * for ( EntityType element : entityTypesClient . listEntityTypes ( parent ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param parent Required . The agent to list all entity types from . Format : ` projects / & lt ; Project * ID & gt ; / agent ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ListEntityTypesPagedResponse listEntityTypes ( ProjectAgentName parent ) { } }
ListEntityTypesRequest request = ListEntityTypesRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . build ( ) ; return listEntityTypes ( request ) ;
public class RewindableReader { /** * Rewinds the reader such that the initial characters are returned when invoking read ( ) . * Throws an exception if more than the buffering limit has already been read . * @ throws IOException */ public void rewind ( ) throws IOException { } }
if ( ! rewindable ) { throw LOG . unableToRewindReader ( ) ; } wrappedReader . unread ( buffer , 0 , pos ) ; pos = 0 ;
public class DrlParser { /** * This will expand the DRL . useful for debugging . * @ param source - * the source which use a DSL * @ param dsl - * the DSL itself . * @ throws DroolsParserException * If unable to expand in any way . */ public String getExpandedDRL ( final String source , final Reader dsl ) throws DroolsParserException { } }
DefaultExpanderResolver resolver = getDefaultResolver ( dsl ) ; return getExpandedDRL ( source , resolver ) ;
public class InsertExtractUtils { /** * Extract read values to an object for SCALAR , SPECTRUM and IMAGE * @ param da * @ return single value for SCALAR , array of primitives for SPECTRUM , 2D array of primitives for IMAGE * @ throws DevFailed */ public static Object extractRead ( final DeviceAttribute da , final AttrDataFormat format ) throws DevFailed { } }
if ( da == null ) { throw DevFailedUtils . newDevFailed ( ERROR_MSG_DA ) ; } return InsertExtractFactory . getAttributeExtractor ( da . getType ( ) ) . extractRead ( da , format ) ;
public class DefaultParser { /** * Handles an unknown token . If the token starts with a dash an * UnrecognizedOptionException is thrown . Otherwise the token is added * to the arguments of the command line . If the stopAtNonOption flag * is set , this stops the parsing and the remaining tokens are added * as - is in the arguments of the command line . * @ param token the command line token to handle */ private void handleUnknownToken ( String token ) throws ParseException { } }
if ( token . startsWith ( "-" ) && token . length ( ) > 1 && ! stopAtNonOption ) { throw new UnrecognizedOptionException ( "Unrecognized option: " + token , token ) ; } cmd . addArg ( token ) ; if ( stopAtNonOption ) { skipParsing = true ; }
public class Twilio { /** * Validate that we can connect to the new SSL certificate posted on api . twilio . com . * @ throws com . twilio . exception . CertificateValidationException if the connection fails */ public static void validateSslCertificate ( ) { } }
final NetworkHttpClient client = new NetworkHttpClient ( ) ; final Request request = new Request ( HttpMethod . GET , "https://api.twilio.com:8443" ) ; try { final Response response = client . makeRequest ( request ) ; if ( ! TwilioRestClient . SUCCESS . apply ( response . getStatusCode ( ) ) ) { throw new CertificateValidationException ( "Unexpected response from certificate endpoint" , request , response ) ; } } catch ( final ApiException e ) { throw new CertificateValidationException ( "Could not get response from certificate endpoint" , request ) ; }
public class XmlRepositoryFactory { /** * Reads XML document and returns it content as list of query names * @ param document document which would be read * @ return list of queries names */ private static List < String > getElementsId ( Document document ) { } }
List < String > result = new ArrayList < String > ( ) ; List < Element > elementList = getElements ( document ) ; result = getElementsId ( elementList ) ; return result ;
public class Mediawiki { /** * create the given user account * @ param name * @ param eMail * @ param realname * @ param mailpassword * @ param reason * @ param language * @ throws Exception */ public Api createAccount ( String name , String eMail , String realname , boolean mailpassword , String reason , String language ) throws Exception { } }
String createtoken = "?" ; if ( getVersion ( ) . compareToIgnoreCase ( "Mediawiki 1.27" ) >= 0 ) { Api apiResult = this . getQueryResult ( "&meta=tokens&type=createaccount" ) ; super . handleError ( apiResult ) ; createtoken = apiResult . getQuery ( ) . getTokens ( ) . getCreateaccounttoken ( ) ; } Api api = null ; if ( getVersion ( ) . compareToIgnoreCase ( "Mediawiki 1.27" ) >= 0 ) { Map < String , String > lFormData = new HashMap < String , String > ( ) ; lFormData . put ( "createtoken" , createtoken ) ; lFormData . put ( "username" , name ) ; lFormData . put ( "email" , eMail ) ; lFormData . put ( "realname" , realname ) ; lFormData . put ( "mailpassword" , mailpassword ? "1" : "0" ) ; lFormData . put ( "reason" , reason ) ; lFormData . put ( "createcontinue" , "1" ) ; String params = "" ; api = getActionResult ( "createaccount" , params , null , lFormData ) ; } else { String params = "&name=" + this . encode ( name ) ; params += "&email=" + this . encode ( eMail ) ; params += "&realname=" + this . encode ( realname ) ; params += "&mailpassword=" + mailpassword ; params += "&reason=" + this . encode ( reason ) ; params += "&token=" ; api = getActionResult ( "createaccount" , params ) ; handleError ( api ) ; String token = api . getCreateaccount ( ) . getToken ( ) ; params += token ; api = getActionResult ( "createaccount" , params ) ; } return api ;
public class DomainFinalResultHandler { private boolean isDomainOperation ( final ModelNode operation ) { } }
final PathAddress address = PathAddress . pathAddress ( operation . require ( OP_ADDR ) ) ; return address . size ( ) == 0 || ! address . getElement ( 0 ) . getKey ( ) . equals ( HOST ) ;
public class Token { /** * This method is deprecated . Please use { @ link # generateToken ( byte [ ] , String , String . . . ) } instead * Generate a token string with secret key , ID and optionally payloads * @ param secret the secret to encrypt to token string * @ param oid the ID of the token ( could be customer ID etc ) * @ param payload the payload optionally indicate more information * @ return an encrypted token string that is expiring in { @ link Life # SHORT } time period */ @ Deprecated public static String generateToken ( String secret , String oid , String ... payload ) { } }
return generateToken ( secret , Life . SHORT , oid , payload ) ;
public class DeleteCertificateRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteCertificateRequest deleteCertificateRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteCertificateRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteCertificateRequest . getCertificateId ( ) , CERTIFICATEID_BINDING ) ; protocolMarshaller . marshall ( deleteCertificateRequest . getForceDelete ( ) , FORCEDELETE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class StubObject { /** * Create a { @ link StubObject } using the provided user ID and a new object ID * @ param sUserID * User ID * @ return Never < code > null < / code > . */ @ Nonnull public static StubObject createForUser ( @ Nullable final String sUserID ) { } }
return new StubObject ( GlobalIDFactory . getNewPersistentStringID ( ) , sUserID , null ) ;
public class HpelTraceServiceConfig { /** * Retrieves process ID for the current process . * @ return pid string retrieved from the process name . */ public static String getPid ( ) { } }
if ( pid == null ) { String runtimeName = ManagementFactory . getRuntimeMXBean ( ) . getName ( ) ; if ( runtimeName == null ) { pid = "unknown" ; } else { int index = runtimeName . indexOf ( '@' ) ; if ( index < 0 ) { pid = runtimeName ; } else { pid = runtimeName . substring ( 0 , index ) ; } } } return pid ;
public class FileGetPropertiesFromComputeNodeHeaders { /** * Set the file creation time . * @ param ocpCreationTime the ocpCreationTime value to set * @ return the FileGetPropertiesFromComputeNodeHeaders object itself . */ public FileGetPropertiesFromComputeNodeHeaders withOcpCreationTime ( DateTime ocpCreationTime ) { } }
if ( ocpCreationTime == null ) { this . ocpCreationTime = null ; } else { this . ocpCreationTime = new DateTimeRfc1123 ( ocpCreationTime ) ; } return this ;
public class SignalUtils { /** * Handle { @ literal INT } signals by calling the specified { @ link Runnable } . * @ param runnable the runnable to call on SIGINT . */ public static void attachSignalHandler ( final Runnable runnable ) { } }
Signal . handle ( SIG_INT , new SignalHandler ( ) { @ Override public void handle ( Signal signal ) { runnable . run ( ) ; } } ) ;
public class URICertStore { /** * Creates a CertStore from information included in the AccessDescription * object of a certificate ' s Authority Information Access Extension . */ static CertStore getInstance ( AccessDescription ad ) { } }
if ( ! ad . getAccessMethod ( ) . equals ( ( Object ) AccessDescription . Ad_CAISSUERS_Id ) ) { return null ; } GeneralNameInterface gn = ad . getAccessLocation ( ) . getName ( ) ; if ( ! ( gn instanceof URIName ) ) { return null ; } URI uri = ( ( URIName ) gn ) . getURI ( ) ; try { return URICertStore . getInstance ( new URICertStore . URICertStoreParameters ( uri ) ) ; } catch ( Exception ex ) { if ( debug != null ) { debug . println ( "exception creating CertStore: " + ex ) ; ex . printStackTrace ( ) ; } return null ; }
public class DateUtil { /** * Adds a number of seconds to a date returning a new object . * The original { @ code Date } is unchanged . * @ param date the date , not null * @ param amount the amount to add , may be negative * @ return the new { @ code Date } with the amount added * @ throws IllegalArgumentException if the date is null */ public static < T extends java . util . Date > T addSeconds ( final T date , final int amount ) { } }
return roll ( date , amount , CalendarUnit . SECOND ) ;
public class Clock { /** * Sets the current second of the clock * @ param SECOND */ public void setSecond ( final int SECOND ) { } }
second = SECOND % 60 ; calculateAngles ( hour , minute , second ) ; repaint ( getInnerBounds ( ) ) ;
public class RefinePolyLineCorner { /** * Given segment information create a line in general notation which has been normalized */ private void createLine ( int index0 , int index1 , List < Point2D_I32 > contour , LineGeneral2D_F64 line ) { } }
if ( index1 < 0 ) System . out . println ( "SHIT" ) ; Point2D_I32 p0 = contour . get ( index0 ) ; Point2D_I32 p1 = contour . get ( index1 ) ; // System . out . println ( " createLine " + p0 + " " + p1 ) ; work . a . set ( p0 . x , p0 . y ) ; work . b . set ( p1 . x , p1 . y ) ; UtilLine2D_F64 . convert ( work , line ) ; // ensure A * A + B * B = 1 line . normalize ( ) ;
public class ProxyThread { /** * Tells whether or not the given { @ code address } is a public address of the host , when behind NAT . * Returns { @ code false } if the proxy is not behind NAT . * < strong > Implementation Note : < / strong > Only AWS EC2 NAT detection is supported , by requesting the public IP address from * < a href = " https : / / docs . aws . amazon . com / AWSEC2 / latest / UserGuide / using - instance - addressing . html # working - with - ip - addresses " > * AWS EC2 instance ' s metadata < / a > . * @ param address the address that will be checked * @ return { @ code true } if the address is public address of the host , { @ code false } otherwise . * @ see ProxyParam # isBehindNat ( ) */ private boolean isOwnPublicAddress ( InetAddress address ) { } }
if ( ! proxyParam . isBehindNat ( ) ) { return false ; } // Support just AWS for now . TransportAddress publicAddress = getAwsCandidateHarvester ( ) . getMask ( ) ; if ( publicAddress == null ) { return false ; } return Arrays . equals ( address . getAddress ( ) , publicAddress . getAddress ( ) . getAddress ( ) ) ;
public class DockerUtils { /** * Finds an image by tag . * @ param imageTag the image tag ( not null ) * @ param images a non - null list of images * @ return an image , or null if none was found */ public static Image findImageByTag ( String imageTag , List < Image > images ) { } }
Image result = null ; for ( Image img : images ) { String [ ] tags = img . getRepoTags ( ) ; if ( tags == null ) continue ; for ( String s : tags ) { if ( s . contains ( imageTag ) ) { result = img ; break ; } } } return result ;
public class SqlTableSession { /** * / * ( non - Javadoc ) * @ see org . parosproxy . paros . db . paros . TableSession # update ( long , java . lang . String ) */ @ Override public synchronized void update ( long sessionId , String sessionName ) throws DatabaseException { } }
SqlPreparedStatementWrapper psUpdate = null ; try { psUpdate = DbSQL . getSingleton ( ) . getPreparedStatement ( "session.ps.update" ) ; psUpdate . getPs ( ) . setLong ( 2 , sessionId ) ; psUpdate . getPs ( ) . setString ( 1 , sessionName ) ; psUpdate . getPs ( ) . executeUpdate ( ) ; } catch ( SQLException e ) { throw new DatabaseException ( e ) ; } finally { DbSQL . getSingleton ( ) . releasePreparedStatement ( psUpdate ) ; }
public class DatabaseConnectionPoliciesInner { /** * Gets a database ' s connection policy , which is used with table auditing . Table auditing is deprecated , use blob auditing instead . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param databaseName The name of the database for which the connection policy is defined . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the DatabaseConnectionPolicyInner object */ public Observable < DatabaseConnectionPolicyInner > getAsync ( String resourceGroupName , String serverName , String databaseName ) { } }
return getWithServiceResponseAsync ( resourceGroupName , serverName , databaseName ) . map ( new Func1 < ServiceResponse < DatabaseConnectionPolicyInner > , DatabaseConnectionPolicyInner > ( ) { @ Override public DatabaseConnectionPolicyInner call ( ServiceResponse < DatabaseConnectionPolicyInner > response ) { return response . body ( ) ; } } ) ;
public class ECKey { /** * Decrypt cipher by AES in SIC ( also know as CTR ) mode * @ param cipher * - proper cipher * @ return decrypted cipher , equal length to the cipher . * @ deprecated should not use EC private scalar value as an AES key */ public byte [ ] decryptAES ( byte [ ] cipher ) { } }
// Uses the raw EC private scalar (privKey.getD()) as the 128/256-bit AES key and a
// zero IV, running the cipher in CTR (SIC) mode via Bouncy Castle. CTR decryption is
// the same operation as encryption, so init(false, ...) plus processBlock suffices.
// The main loop processes whole 16-byte blocks; the tail (< one block) is copied into
// a zero-padded temporary block, processed, and the meaningful prefix copied back so
// the output length equals the input length.
// NOTE(review): requires the private key to be a BCECPrivateKey; other key types throw.
if ( privKey == null ) { throw new MissingPrivateKeyException ( ) ; } if ( ! ( privKey instanceof BCECPrivateKey ) ) { throw new UnsupportedOperationException ( "Cannot use the private key as an AES key" ) ; } AESFastEngine engine = new AESFastEngine ( ) ; SICBlockCipher ctrEngine = new SICBlockCipher ( engine ) ; KeyParameter key = new KeyParameter ( BigIntegers . asUnsignedByteArray ( ( ( BCECPrivateKey ) privKey ) . getD ( ) ) ) ; ParametersWithIV params = new ParametersWithIV ( key , new byte [ 16 ] ) ; ctrEngine . init ( false , params ) ; int i = 0 ; byte [ ] out = new byte [ cipher . length ] ; while ( i < cipher . length ) { ctrEngine . processBlock ( cipher , i , out , i ) ; i += engine . getBlockSize ( ) ; if ( cipher . length - i < engine . getBlockSize ( ) ) break ; } // process left bytes if ( cipher . length - i > 0 ) { byte [ ] tmpBlock = new byte [ 16 ] ; System . arraycopy ( cipher , i , tmpBlock , 0 , cipher . length - i ) ; ctrEngine . processBlock ( tmpBlock , 0 , tmpBlock , 0 ) ; System . arraycopy ( tmpBlock , 0 , out , i , cipher . length - i ) ; } return out ;
public class InstrumentationFactory { /** * If < b > ibm < / b > is false , this private method will create a new URLClassLoader and attempt to load * the com . sun . tools . attach . VirtualMachine class from the provided toolsJar file . * If < b > ibm < / b > is true , this private method will ignore the toolsJar parameter and load the * com . ibm . tools . attach . VirtualMachine class . * @ return The AttachAPI VirtualMachine class < br > * or null if something unexpected happened . */ private static Class < ? > loadVMClass ( Resource toolsJar , Log log , JavaVendor vendor ) { } }
try { ClassLoader loader = ClassLoader . getSystemClassLoader ( ) ; String cls = vendor . getVirtualMachineClassName ( ) ; // if ( ! vendor . isIBM ( ) ) { loader = new URLClassLoader ( new URL [ ] { ( ( FileResource ) toolsJar ) . toURI ( ) . toURL ( ) } , loader ) ; return loader . loadClass ( cls ) ; } catch ( Exception e ) { log . log ( Log . LEVEL_INFO , "Instrumentation" , e ) ; } return null ;
public class AbstractSeeker { /** * Reset * @ param newTolerance * @ param newAction */ public void reset ( double newTolerance , Runnable newAction ) { } }
this . tolerance = newTolerance ; this . action = newAction ; done = false ;
public class AmazonCloudDirectoryClient { /** * Allows a schema to be updated using JSON upload . Only available for development schemas . See < a * href = " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / schemas _ jsonformat . html # schemas _ json " > JSON * Schema Format < / a > for more information . * @ param putSchemaFromJsonRequest * @ return Result of the PutSchemaFromJson operation returned by the service . * @ throws InternalServiceException * Indicates a problem that must be resolved by Amazon Web Services . This might be a transient error in * which case you can retry your request until it succeeds . Otherwise , go to the < a * href = " http : / / status . aws . amazon . com / " > AWS Service Health Dashboard < / a > site to see if there are any * operational issues with the service . * @ throws InvalidArnException * Indicates that the provided ARN value is not valid . * @ throws RetryableConflictException * Occurs when a conflict with a previous successful write is detected . For example , if a write operation * occurs on an object and then an attempt is made to read the object using “ SERIALIZABLE ” consistency , this * exception may result . This generally occurs when the previous write did not have time to propagate to the * host serving the current request . A retry ( with appropriate backoff logic ) is the recommended response to * this exception . * @ throws ValidationException * Indicates that your request is malformed in some manner . See the exception message . * @ throws LimitExceededException * Indicates that limits are exceeded . See < a * href = " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / limits . html " > Limits < / a > for more * information . * @ throws AccessDeniedException * Access denied . Check your permissions . * @ throws InvalidSchemaDocException * Indicates that the provided < code > SchemaDoc < / code > value is not valid . 
* @ throws InvalidRuleException * Occurs when any of the rule parameter keys or values are invalid . * @ sample AmazonCloudDirectory . PutSchemaFromJson * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / clouddirectory - 2017-01-11 / PutSchemaFromJson " * target = " _ top " > AWS API Documentation < / a > */ @ Override public PutSchemaFromJsonResult putSchemaFromJson ( PutSchemaFromJsonRequest request ) { } }
request = beforeClientExecution ( request ) ; return executePutSchemaFromJson ( request ) ;
public class JBBPOut { /** * Write bits from a value into the output stream * @ param numberOfBits the number of bits to be saved * @ param value the value which bits must be saved * @ return the DSL session * @ throws IOException it will be thrown for transport errors */ public JBBPOut Bits ( final JBBPBitNumber numberOfBits , final int value ) throws IOException { } }
assertNotEnded ( ) ; JBBPUtils . assertNotNull ( numberOfBits , "Number of bits must not be null" ) ; if ( this . processCommands ) { _writeBits ( numberOfBits , value ) ; } return this ;
public class ELImageInputTagBeanInfo { /** * ( non - Javadoc ) * @ see java . beans . SimpleBeanInfo # getPropertyDescriptors ( ) */ @ Override public PropertyDescriptor [ ] getPropertyDescriptors ( ) { } }
List < PropertyDescriptor > proplist = new ArrayList < > ( ) ; try { proplist . add ( new PropertyDescriptor ( "base64" , ELImageInputTag . class , null , "setBase64Expr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "align" , ELImageInputTag . class , null , "setAlignExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "alt" , ELImageInputTag . class , null , "setAltExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "border" , ELImageInputTag . class , null , "setBorderExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "dir" , ELImageInputTag . class , null , "setDirExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "disabled" , ELImageInputTag . class , null , "setDisabledExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "lang" , ELImageInputTag . class , null , "setLangExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onblur" , ELImageInputTag . class , null , "setOnblurExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onchange" , ELImageInputTag . class , null , "setOnchangeExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onclick" , ELImageInputTag . class , null , "setOnclickExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "ondblclick" , ELImageInputTag . class , null , "setOndblclickExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onfocus" , ELImageInputTag . class , null , "setOnfocusExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . 
add ( new PropertyDescriptor ( "onkeydown" , ELImageInputTag . class , null , "setOnkeydownExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onkeypress" , ELImageInputTag . class , null , "setOnkeypressExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onkeyup" , ELImageInputTag . class , null , "setOnkeyupExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onmousedown" , ELImageInputTag . class , null , "setOnmousedownExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onmousemove" , ELImageInputTag . class , null , "setOnmousemoveExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onmouseout" , ELImageInputTag . class , null , "setOnmouseoutExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onmouseover" , ELImageInputTag . class , null , "setOnmouseoverExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "onmouseup" , ELImageInputTag . class , null , "setOnmouseupExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "src" , ELImageInputTag . class , null , "setSrcExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "style" , ELImageInputTag . class , null , "setStyleExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "styleClass" , ELImageInputTag . class , null , "setStyleClassExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "styleId" , ELImageInputTag . class , null , "setStyleIdExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "tabindex" , ELImageInputTag . 
class , null , "setTabindexExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "title" , ELImageInputTag . class , null , "setTitleExpr" ) ) ; } catch ( IntrospectionException ex ) { } try { proplist . add ( new PropertyDescriptor ( "value" , ELImageInputTag . class , null , "setValueExpr" ) ) ; } catch ( IntrospectionException ex ) { } PropertyDescriptor [ ] result = new PropertyDescriptor [ proplist . size ( ) ] ; return ( ( PropertyDescriptor [ ] ) proplist . toArray ( result ) ) ;
public class NodeUtils { /** * Determines if parent node has children that are all leaves */ public static boolean parentContainsOnlyLeaves ( ParentNode parentNode ) { } }
for ( Node child : parentNode . children ( ) ) { if ( ! isLeaf ( child ) ) return false ; } return true ;
public class Blast {
    /**
     * Decodes a PKWare Data Compression Library ("implode") stream.
     *
     * Format notes:
     * - The first byte is 0 if literals are uncoded or 1 if they are Huffman
     *   coded. The second byte is 4, 5, or 6: the number of extra bits in a
     *   distance code, i.e. log2(dictionary size) - 6.
     * - Compressed data is a sequence of literals and length/distance pairs,
     *   terminated by an end code (decoded length 519). A single bit precedes
     *   each item: 0 for a literal, 1 for a length/distance pair.
     * - Uncoded literals, length extra bits and distance extra bits are read
     *   in normal bit order (no bit reversal needed).
     * - A length/distance pair copies `length` previously written bytes
     *   starting `distance` bytes back in the output. Overlapped copies
     *   (length greater than distance) are legal and common; the simple
     *   forward byte-by-byte copy below handles them correctly. Distances
     *   reaching before the start of the output are an error.
     *
     * @param input InputStream instance
     * @param output OutputStream instance
     * @return 0 on success; -1 for a bad literal flag, -2 for a bad
     *         dictionary size, -3 for a distance reaching before the output
     */
    public int blast ( InputStream input , OutputStream output ) throws IOException { } }
m_input = input;
m_output = output;
int lit;    /* true if literals are coded */
int dict;   /* log2 ( dictionary size ) - 6 */
int symbol; /* decoded symbol , extra bits for distance */
int len;    /* length for copy */
int dist;   /* distance for copy */
int copy;   /* copy counter */
// unsigned char * from , * to ; / * copy pointers * /
/* read header */
lit = bits(8);
if (lit > 1) {
    return -1;
}
dict = bits(8);
if (dict < 4 || dict > 6) {
    return -2;
}
/* decode literals and length / distance pairs */
do {
    if (bits(1) != 0) {
        /* get length */
        symbol = decode(LENCODE);
        len = BASE[symbol] + bits(EXTRA[symbol]);
        if (len == 519) {
            break; /* end code */
        }
        /* get distance: a length of 2 always uses 2 extra distance bits */
        symbol = len == 2 ? 2 : dict;
        dist = decode(DISTCODE) << symbol;
        dist += bits(symbol);
        dist++;
        if (m_first != 0 && dist > m_next) {
            return -3; /* distance too far back */
        }
        /* copy length bytes from distance bytes back */
        do {
            // to = m _ out + m _ next ; (window-relative indices, ported from C pointers)
            int to = m_next;
            int from = to - dist;
            copy = MAXWIN;
            if (m_next < dist) {
                // source wraps around the circular window
                from += copy;
                copy = dist;
            }
            copy -= m_next;
            if (copy > len) {
                copy = len;
            }
            len -= copy;
            m_next += copy;
            do {
                // * to + + = * from + + ; forward copy handles overlap correctly
                m_out[to++] = m_out[from++];
            } while (--copy != 0);
            if (m_next == MAXWIN) {
                // window full: flush it to the output stream
                m_output.write(m_out, 0, m_next);
                m_next = 0;
                m_first = 0;
            }
        } while (len != 0);
    } else {
        /* get literal and write it */
        symbol = lit != 0 ? decode(LITCODE) : bits(8);
        m_out[m_next++] = (byte) symbol;
        if (m_next == MAXWIN) {
            // window full: flush it to the output stream
            m_output.write(m_out, 0, m_next);
            m_next = 0;
            m_first = 0;
        }
    }
} while (true);
// flush any bytes still buffered in the window
if (m_next != 0) {
    m_output.write(m_out, 0, m_next);
}
return 0;
public class ModeledConnectionGroup {

    /**
     * Returns the maximum number of connections that should be allowed to this
     * connection group overall. If no limit applies, zero is returned.
     *
     * @return
     *     The maximum number of connections that should be allowed to this
     *     connection group overall, or zero if no limit applies.
     * @throws GuacamoleException
     *     If an error occurs while parsing the concurrency limit properties
     *     specified within guacamole.properties.
     */
    public int getMaxConnections() throws GuacamoleException {

        // Use the group's own limit when one is defined ...
        Integer maxConnections = getModel().getMaxConnections();
        if (maxConnections != null)
            return maxConnections;

        // ... otherwise fall back to the environment-wide default
        return environment.getDefaultMaxGroupConnections();

    }
}
public class WebSphereCDIDeploymentImpl {
    /**
     * Shutdown and clean up the whole deployment. The deployment will not be
     * usable after this call has been made.
     */
    @Override
    public void shutdown ( ) { } }
// Guard: bootstrap is nulled below, so a second call is a harmless no-op.
if (this.bootstrap != null) {
    // Weld shutdown may need permissions the caller lacks; run privileged.
    AccessController.doPrivileged(new PrivilegedAction<Void>() {
        @Override
        public Void run() {
            bootstrap.shutdown();
            return null;
        }
    });
    // Release all references so the application, its bean archives and class
    // loaders can be garbage collected; the deployment is dead from here on.
    this.bootstrap = null;
    this.deploymentDBAs.clear();
    this.applicationBDAs.clear();
    this.classloader = null;
    this.extensionClassLoaders.clear();
    this.cdiEnabled = false;
    this.cdiStatusMap.clear();
    this.application = null;
    this.classBDAMap.clear();
}
public class SqlHelper { /** * Example 中包含至少 1 个查询条件 * @ param parameterName 参数名 * @ return */ public static String exampleHasAtLeastOneCriteriaCheck ( String parameterName ) { } }
StringBuilder sql = new StringBuilder ( ) ; sql . append ( "<bind name=\"exampleHasAtLeastOneCriteriaCheck\" value=\"@tk.mybatis.mapper.util.OGNL@exampleHasAtLeastOneCriteriaCheck(" ) ; sql . append ( parameterName ) . append ( ")\"/>" ) ; return sql . toString ( ) ;
public class ThingGroupDocument { /** * Parent group names . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setParentGroupNames ( java . util . Collection ) } or { @ link # withParentGroupNames ( java . util . Collection ) } if you * want to override the existing values . * @ param parentGroupNames * Parent group names . * @ return Returns a reference to this object so that method calls can be chained together . */ public ThingGroupDocument withParentGroupNames ( String ... parentGroupNames ) { } }
if ( this . parentGroupNames == null ) { setParentGroupNames ( new java . util . ArrayList < String > ( parentGroupNames . length ) ) ; } for ( String ele : parentGroupNames ) { this . parentGroupNames . add ( ele ) ; } return this ;
public class WorkbenchEntry { /** * Adds the given visit state to the visit - state queue , and adds this entry to the workbench if it was empty * and not { @ linkplain # acquired } . * @ param visitState the nonempty visit state that must be added to the visit - state queue . * @ param workbench the workbench . * @ see PriorityQueue # add ( Object ) */ public synchronized void add ( VisitState visitState , Workbench workbench ) { } }
assert ! visitState . isEmpty ( ) : visitState ; if ( ASSERTS ) if ( visitStates . contains ( visitState ) ) LOGGER . error ( "Visit state " + visitState + " already in this workbench entry (" + Arrays . asList ( Thread . currentThread ( ) . getStackTrace ( ) ) ) ; final boolean wasEmpty = isEmpty ( ) ; add ( visitState ) ; if ( wasEmpty && ! acquired ) workbench . add ( this ) ;
public class MockEC2QueryHandler { /** * Handles " createTags " request and create new Tags . * @ param resourcesSet List of resourceIds . * @ param tagSet Map for key , value of tags . * @ return a CreateTagsResponseType with Status of Tags . */ private CreateTagsResponseType createTags ( final List < String > resourcesSet , final Map < String , String > tagSet ) { } }
CreateTagsResponseType ret = new CreateTagsResponseType ( ) ; ret . setRequestId ( UUID . randomUUID ( ) . toString ( ) ) ; mockTagsController . createTags ( resourcesSet , tagSet ) ; ret . setReturn ( true ) ; return ret ;
public class Application { /** * This option is for advanced users only . This is meta information about third - party applications that third - party * vendors use for testing purposes . * @ return This option is for advanced users only . This is meta information about third - party applications that * third - party vendors use for testing purposes . */ public java . util . Map < String , String > getAdditionalInfo ( ) { } }
if ( additionalInfo == null ) { additionalInfo = new com . amazonaws . internal . SdkInternalMap < String , String > ( ) ; } return additionalInfo ;
public class CmsXmlContentPropertyHelper { /** * Returns a sitemap or VFS path given a sitemap entry id or structure id . < p > * This method first tries to read a sitemap entry with the given id . If this succeeds , * the sitemap entry ' s sitemap path will be returned . If it fails , the method interprets * the id as a structure id and tries to read the corresponding resource , and then returns * its VFS path . < p > * @ param cms the CMS context * @ param id a sitemap entry id or structure id * @ return a sitemap or VFS uri * @ throws CmsException if something goes wrong */ public static String getUriForId ( CmsObject cms , CmsUUID id ) throws CmsException { } }
CmsResource res = cms . readResource ( id ) ; return cms . getSitePath ( res ) ;
public class ServerRequestInitSession {
    /**
     * Updates link referrer params such as Play Store referrer params.
     * For link-clicked installs, the link click id is updated when the install
     * referrer broadcast is received. Also updates any googleSearchReferrer
     * delivered with the Play Store referrer broadcast.
     *
     * @see InstallListener
     * @see Branch#setPlayStoreReferrerCheckTimeout(long)
     */
    void updateLinkReferrerParams ( ) { } }
// Add link identifier if present
String linkIdentifier = prefHelper_.getLinkClickIdentifier();
if (!linkIdentifier.equals(PrefHelper.NO_STRING_VALUE)) {
    try {
        getPost().put(Defines.Jsonkey.LinkIdentifier.getKey(), linkIdentifier);
        getPost().put(Defines.Jsonkey.FaceBookAppLinkChecked.getKey(), prefHelper_.getIsAppLinkTriggeredInit());
    } catch (JSONException ignore) {
        // best-effort: a failed put simply omits the field from the request
    }
}
// Add Google search install referrer if present
String googleSearchInstallIdentifier = prefHelper_.getGoogleSearchInstallIdentifier();
if (!googleSearchInstallIdentifier.equals(PrefHelper.NO_STRING_VALUE)) {
    try {
        getPost().put(Defines.Jsonkey.GoogleSearchInstallReferrer.getKey(), googleSearchInstallIdentifier);
    } catch (JSONException ignore) {
    }
}
// Add Google play raw referrer if present
String googlePlayReferrer = prefHelper_.getGooglePlayReferrer();
if (!googlePlayReferrer.equals(PrefHelper.NO_STRING_VALUE)) {
    try {
        getPost().put(Defines.Jsonkey.GooglePlayInstallReferrer.getKey(), googlePlayReferrer);
    } catch (JSONException ignore) {
    }
}
// Check for conversion from instant app to full app
if (prefHelper_.isFullAppConversion()) {
    try {
        getPost().put(Defines.Jsonkey.AndroidAppLinkURL.getKey(), prefHelper_.getAppLink());
        getPost().put(Defines.Jsonkey.IsFullAppConv.getKey(), true);
    } catch (JSONException ignore) {
    }
}
public class InventoryData { /** * Adds the given source to the list of sources for the widget . * @ param source The source to add to the list of sources */ public void addSource ( String source ) { } }
if ( this . sources == null ) this . sources = new ArrayList < String > ( ) ; this . sources . add ( source ) ;
public class RequestToken { /** * Loads a request token from the bundle * and immediately tries to resume the request with handler * @ param bundle a non null bundle * @ param name the key of the saved token * @ param handler a handler to resume the request with . * @ return the token if resumed , null otherwise */ public static RequestToken loadAndResume ( Bundle bundle , String name , BaasHandler < ? > handler ) { } }
if ( bundle == null ) throw new IllegalArgumentException ( "bunlde cannot be null" ) ; if ( name == null ) throw new IllegalArgumentException ( "name cannot be null" ) ; RequestToken token = bundle . getParcelable ( name ) ; if ( token != null && token . resume ( handler ) ) { return token ; } return null ;
public class DefaultNlsTemplateResolver { /** * This method initializes the { @ link NlsTemplate } s for reverse lookup for { @ link NlsBundle } s . * @ param map the { @ link Map } where to { @ link Map # put ( Object , Object ) register } the { @ link NlsTemplate } s by * their { @ link net . sf . mmm . util . nls . api . NlsMessage # getInternationalizedMessage ( ) i18n message } . */ protected void initTemplatesForNlsBundles ( Map < String , NlsTemplate > map ) { } }
if ( this . bundleFactory instanceof AbstractNlsBundleFactory ) { Collection < ? extends NlsBundleDescriptor > bundleDescriptors = ( ( AbstractNlsBundleFactory ) this . bundleFactory ) . getNlsBundleDescriptors ( ) ; for ( NlsBundleDescriptor descriptor : bundleDescriptors ) { for ( Provider < NlsTemplate > container : descriptor . getTemplateContainers ( ) ) { NlsTemplate template = container . get ( ) ; if ( template instanceof NlsTemplateImplWithMessage ) { String message = template . translate ( AbstractNlsMessage . LOCALE_ROOT ) ; map . put ( message , template ) ; } } } }
public class AccountingDate {
    /**
     * Obtains an {@code AccountingDate} representing a date in the given
     * Accounting calendar system from the proleptic-year and day-of-year fields.
     *
     * This returns an {@code AccountingDate} with the specified fields. The day
     * must be valid for the year, otherwise an exception will be thrown.
     *
     * @param chronology the Accounting chronology to base the date on, not null
     * @param prolepticYear the Accounting proleptic-year
     * @param dayOfYear the Accounting day-of-year, from 1 to 371
     * @return the date in the Accounting calendar system, not null
     * @throws DateTimeException if the value of any field is out of range,
     *         or if the day-of-year is invalid for the year
     * @throws NullPointerException if an AccountingChronology was not provided
     */
    static AccountingDate ofYearDay ( AccountingChronology chronology , int prolepticYear , int dayOfYear ) { } }
Objects.requireNonNull(chronology, "A previously setup chronology is required.");
// Validate the raw field ranges before doing any calendar math.
YEAR.checkValidValue(prolepticYear);
DAY_OF_YEAR_RANGE.checkValidValue(dayOfYear, DAY_OF_YEAR);
boolean leap = chronology.isLeapYear(prolepticYear);
// Only leap years contain the extra week, so day-of-year values beyond
// WEEKS_IN_YEAR * DAYS_IN_WEEK are invalid in a non-leap year.
if (dayOfYear > WEEKS_IN_YEAR * DAYS_IN_WEEK && !leap) {
    throw new DateTimeException("Invalid date 'DayOfYear " + dayOfYear + "' as '" + prolepticYear + "' is not a leap year");
}
// Map elapsed whole weeks to a month; in a leap year the division must also
// account for where the leap week is placed.
int month = (leap ? chronology.getDivision().getMonthFromElapsedWeeks((dayOfYear - 1) / DAYS_IN_WEEK, chronology.getLeapWeekInMonth())
        : chronology.getDivision().getMonthFromElapsedWeeks((dayOfYear - 1) / DAYS_IN_WEEK));
// Day-of-month is what remains after subtracting the weeks that precede the month.
int dayOfMonth = dayOfYear - (leap ? chronology.getDivision().getWeeksAtStartOfMonth(month, chronology.getLeapWeekInMonth())
        : chronology.getDivision().getWeeksAtStartOfMonth(month)) * DAYS_IN_WEEK;
return new AccountingDate(chronology, prolepticYear, month, dayOfMonth);
public class RemoveTargetsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RemoveTargetsRequest removeTargetsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( removeTargetsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( removeTargetsRequest . getRule ( ) , RULE_BINDING ) ; protocolMarshaller . marshall ( removeTargetsRequest . getIds ( ) , IDS_BINDING ) ; protocolMarshaller . marshall ( removeTargetsRequest . getForce ( ) , FORCE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CmsAliasManager {
    /**
     * Updates the aliases in the database.<p>
     *
     * @param cms the current CMS context
     * @param toDelete the collection of aliases to delete
     * @param toAdd the collection of aliases to add
     * @throws CmsException if something goes wrong
     */
    public synchronized void updateAliases ( CmsObject cms , Collection < CmsAlias > toDelete , Collection < CmsAlias > toAdd ) throws CmsException { } }
checkPermissionsForMassEdit(cms);
Set<CmsUUID> allKeys = new HashSet<CmsUUID>();
Multimap<CmsUUID, CmsAlias> toDeleteMap = ArrayListMultimap.create();
// first, group the aliases by structure id
for (CmsAlias alias : toDelete) {
    toDeleteMap.put(alias.getStructureId(), alias);
    allKeys.add(alias.getStructureId());
}
Multimap<CmsUUID, CmsAlias> toAddMap = ArrayListMultimap.create();
for (CmsAlias alias : toAdd) {
    toAddMap.put(alias.getStructureId(), alias);
    allKeys.add(alias.getStructureId());
}
// Do all the deletions first, so we don't run into duplicate key errors for the alias paths
for (CmsUUID structureId : allKeys) {
    Set<CmsAlias> aliasesToSave = new HashSet<CmsAlias>(getAliasesForStructureId(cms, structureId));
    Collection<CmsAlias> toDeleteForId = toDeleteMap.get(structureId);
    if ((toDeleteForId != null) && !toDeleteForId.isEmpty()) {
        aliasesToSave.removeAll(toDeleteForId);
    }
    saveAliases(cms, structureId, new ArrayList<CmsAlias>(aliasesToSave));
}
// Second pass: apply the additions on top of the now deletion-free state.
for (CmsUUID structureId : allKeys) {
    Set<CmsAlias> aliasesToSave = new HashSet<CmsAlias>(getAliasesForStructureId(cms, structureId));
    Collection<CmsAlias> toAddForId = toAddMap.get(structureId);
    if ((toAddForId != null) && !toAddForId.isEmpty()) {
        aliasesToSave.addAll(toAddForId);
    }
    saveAliases(cms, structureId, new ArrayList<CmsAlias>(aliasesToSave));
}
public class CalendarCodeGenerator {
    /**
     * Populates the generated metaZone mapping: emits a static
     * {@code metazones} field initialized with every zone's MetaZone entries,
     * plus a {@code getMetazone(zoneId, date)} lookup method.
     *
     * @param type builder for the generated class receiving the field and method
     * @param metazones source mapping of zone id to its metazone history
     */
    private void addMetaZones ( TypeSpec . Builder type , Map < String , MetaZone > metazones ) { } }
ClassName metazoneType = ClassName.get(Types.PACKAGE_CLDR_DATES, "MetaZone");
TypeName mapType = ParameterizedTypeName.get(MAP, STRING, metazoneType);
FieldSpec.Builder field = FieldSpec.builder(mapType, "metazones", PROTECTED, STATIC, FINAL);
CodeBlock.Builder code = CodeBlock.builder();
code.beginControlFlow("new $T<$T, $T>() {", HashMap.class, String.class, metazoneType);
for (Map.Entry<String, MetaZone> entry : metazones.entrySet()) {
    String zoneId = entry.getKey();
    MetaZone zone = entry.getValue();
    code.beginControlFlow("\nput($S, new $T($S,\n new $T.Entry[] ", zoneId, metazoneType, zoneId, metazoneType);
    int size = zone.metazones.size();
    // NOTE(review): reverses the entry list in place, mutating the caller's
    // MetaZone objects — confirm callers do not rely on the original order.
    Collections.reverse(zone.metazones);
    for (int i = 0; i < size; i++) {
        MetaZoneEntry meta = zone.metazones.get(i);
        if (i > 0) {
            code.add(",\n");
        }
        code.add(" new $T.Entry($S, ", metazoneType, meta.metazone);
        // Emit epoch-second bounds, with the human-readable timestamp as a
        // comment; -1 marks an open-ended bound.
        if (meta.from != null) {
            code.add("/* $L */ $L, ", meta.fromString, meta.from.toEpochSecond());
        } else {
            code.add("-1, ");
        }
        if (meta.to != null) {
            code.add("/* $L */ $L)", meta.toString, meta.to.toEpochSecond());
        } else {
            code.add("-1)");
        }
    }
    code.endControlFlow("))");
}
code.endControlFlow("\n}");
field.initializer(code.build());
type.addField(field.build());
// Generated lookup: resolve a zone id and date to the metazone name in effect.
MethodSpec.Builder method = MethodSpec.methodBuilder("getMetazone")
        .addModifiers(PUBLIC, STATIC)
        .addParameter(String.class, "zoneId")
        .addParameter(ZonedDateTime.class, "date")
        .returns(String.class);
method.addStatement("$T zone = metazones.get(zoneId)", metazoneType);
method.addStatement("return zone == null ? null : zone.applies(date)");
type.addMethod(method.build());
public class WaitForStep { /** * { @ inheritDoc } */ @ Override public void setVariables ( Map < String , String > variables ) { } }
injectVariables ( variables , element ) ; if ( timeoutVariable != null ) { timeout = timeoutVariable . getConvertedValue ( variables ) ; }
public class MessageProcessor {
    /**
     * Fires an event notification of type TYPE_SIB_SECURITY_NOT_AUTHENTICATED
     * with reason SECURITY_REASON_NOT_AUTHENTICATED for the given user.
     *
     * @param userName the user id that failed authentication
     */
    private void fireNotAuthenticatedEvent ( String userName ) { } }
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.entry(tc, "fireNotAuthenticatedEvent", userName);
// Check that we have a RuntimeEventListener; without one the event is
// silently dropped (debug-traced below).
if (_runtimeEventListener != null) {
    // Build the message for the Notification
    String message = nls.getFormattedMessage("USER_NOT_AUTHORIZED_ERROR_CWSIP0301", new Object[] { userName, getMessagingEngineName(), getMessagingEngineBus() }, null);
    // Build the properties for the Notification
    Properties props = new Properties();
    props.put(SibNotificationConstants.KEY_OPERATION, SibNotificationConstants.OPERATION_CONNECT);
    props.put(SibNotificationConstants.KEY_SECURITY_USERID, userName);
    props.put(SibNotificationConstants.KEY_SECURITY_REASON, SibNotificationConstants.SECURITY_REASON_NOT_AUTHENTICATED);
    // Fire the event
    _runtimeEventListener.runtimeEventOccurred(_engine, SibNotificationConstants.TYPE_SIB_SECURITY_NOT_AUTHENTICATED, message, props);
} else {
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
        SibTr.debug(tc, "Null RuntimeEventListener, cannot fire event");
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.exit(tc, "fireNotAuthenticatedEvent");
public class ParametersModule { /** * Creates a module which will bind { @ link Parameters } to the provided { @ code params } and will * also log the contents of the parameters to standard output at level { @ code info } . */ public static ParametersModule createAndDump ( Parameters params ) { } }
log . info ( params . dump ( ) ) ; return new ParametersModule ( params ) ;
public class TableAppender { /** * Open the table , flush all rows from start , but do not freeze the table * @ param util a XMLUtil instance for writing XML * @ param appendable where to write * @ throws IOException if an I / O error occurs during the flush */ public void flushAllAvailableRows ( final XMLUtil util , final Appendable appendable ) throws IOException { } }
this . appendPreamble ( util , appendable ) ; this . appendRows ( util , appendable , 0 ) ;
public class BpmnParse { /** * Parses a parallel gateway declaration . */ public ActivityImpl parseParallelGateway ( Element parallelGwElement , ScopeImpl scope ) { } }
ActivityImpl activity = createActivityOnScope ( parallelGwElement , scope ) ; activity . setActivityBehavior ( new ParallelGatewayActivityBehavior ( ) ) ; parseAsynchronousContinuationForActivity ( parallelGwElement , activity ) ; parseExecutionListenersOnScope ( parallelGwElement , activity ) ; for ( BpmnParseListener parseListener : parseListeners ) { parseListener . parseParallelGateway ( parallelGwElement , scope , activity ) ; } return activity ;
public class SecurityActions { /** * Get a Subject instance * @ param subjectFactory The subject factory * @ param domain The domain * @ return The instance */ static Subject createSubject ( final SubjectFactory subjectFactory , final String domain ) { } }
if ( System . getSecurityManager ( ) == null ) return subjectFactory . createSubject ( domain ) ; return AccessController . doPrivileged ( new PrivilegedAction < Subject > ( ) { public Subject run ( ) { return subjectFactory . createSubject ( domain ) ; } } ) ;
public class Query { /** * < pre > * { " $ or " : [ { field : < field > , regex : < ^ string $ > , caseInsensitive : < caseInsensitive > , . . . } , . . . ] } * < / pre > */ public static Query withStrings ( String field , String [ ] values , boolean caseInsensitive ) { } }
if ( caseInsensitive ) { List < Query > regexList = new ArrayList < Query > ( ) ; for ( String value : values ) { regexList . add ( withString ( field , value , true ) ) ; } return Query . or ( regexList ) ; } else { return Query . withValues ( field , Query . in , Literal . values ( values ) ) ; }
public class Util { /** * Get the Bard config object . The properties is set in " bard . properties " . * @ return The config object . */ public static CompositeConfiguration getConfig ( ) { } }
if ( config == null ) { config = new CompositeConfiguration ( ) ; String configFile = "bard.properties" ; if ( Util . class . getClassLoader ( ) . getResource ( configFile ) == null ) { return config ; } try { config . addConfiguration ( new PropertiesConfiguration ( "bard.properties" ) ) ; } catch ( ConfigurationException e ) { logger . error ( "Load Bard configuration \"bard.properties\" error: {}" , e ) ; } } return config ;
public class GreenPepperXmlRpcServer { /** * { @ inheritDoc } */ public String ping ( Vector < Object > repositoryParams ) { } }
try { Repository repository = loadRepository ( repositoryParams ) ; return SUCCESS ; } catch ( GreenPepperServerException e ) { return errorAsString ( e , "" ) ; }