signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class NodeUtil { /** * Returns { @ code true } if { @ code node } is guaranteed to be an ` Iterable ` that causes no * side - effects during iteration , { @ code false } otherwise . */ private static boolean isPureIterable ( Node node ) { } }
switch ( node . getToken ( ) ) { case ARRAYLIT : case STRING : case TEMPLATELIT : return true ; // These iterables are known to be pure . default : return false ; // Anything else , including a non - iterable ( e . g . ` null ` ) , would be impure . }
public class Category { /** * Determines whether the priority passed as parameter is enabled in the * underlying SLF4J logger . Each log4j priority is mapped directly to its * SLF4J equivalent , except for FATAL which is mapped as ERROR . * @ param p * the priority to check against * @ return true if this logger is enabled for the given level , false * otherwise . */ public boolean isEnabledFor ( Priority p ) { } }
switch ( p . level ) { case Level . TRACE_INT : return slf4jLogger . isTraceEnabled ( ) ; case Level . DEBUG_INT : return slf4jLogger . isDebugEnabled ( ) ; case Level . INFO_INT : return slf4jLogger . isInfoEnabled ( ) ; case Level . WARN_INT : return slf4jLogger . isWarnEnabled ( ) ; case Level . ERROR_INT : return slf4jLogger . isErrorEnabled ( ) ; case Priority . FATAL_INT : return slf4jLogger . isErrorEnabled ( ) ; } return false ;
public class GoogleMapShape { /** * Expand the bounding box by the LatLng * @ param boundingBox bounding box * @ param latLng lat lng */ private void expandBoundingBox ( BoundingBox boundingBox , LatLng latLng ) { } }
double latitude = latLng . latitude ; double longitude = latLng . longitude ; if ( boundingBox . getMinLongitude ( ) <= 3 * ProjectionConstants . WGS84_HALF_WORLD_LON_WIDTH && boundingBox . getMaxLongitude ( ) >= 3 * - ProjectionConstants . WGS84_HALF_WORLD_LON_WIDTH ) { if ( longitude < boundingBox . getMinLongitude ( ) ) { if ( boundingBox . getMinLongitude ( ) - longitude > ( longitude + ( 2 * ProjectionConstants . WGS84_HALF_WORLD_LON_WIDTH ) ) - boundingBox . getMaxLongitude ( ) ) { longitude += ( 2 * ProjectionConstants . WGS84_HALF_WORLD_LON_WIDTH ) ; } } else if ( longitude > boundingBox . getMaxLongitude ( ) ) { if ( longitude - boundingBox . getMaxLongitude ( ) > boundingBox . getMinLongitude ( ) - ( longitude - ( 2 * ProjectionConstants . WGS84_HALF_WORLD_LON_WIDTH ) ) ) { longitude -= ( 2 * ProjectionConstants . WGS84_HALF_WORLD_LON_WIDTH ) ; } } } if ( latitude < boundingBox . getMinLatitude ( ) ) { boundingBox . setMinLatitude ( latitude ) ; } if ( latitude > boundingBox . getMaxLatitude ( ) ) { boundingBox . setMaxLatitude ( latitude ) ; } if ( longitude < boundingBox . getMinLongitude ( ) ) { boundingBox . setMinLongitude ( longitude ) ; } if ( longitude > boundingBox . getMaxLongitude ( ) ) { boundingBox . setMaxLongitude ( longitude ) ; }
public class DSLMapWalker { /** * src / main / resources / org / drools / compiler / lang / dsl / DSLMapWalker . g : 16:1 : mapping _ file returns [ DSLMapping mapping ] : ^ ( VT _ DSL _ GRAMMAR ( valid _ entry ) * ) ; */ public final DSLMapping mapping_file ( ) throws RecognitionException { } }
mapping_file_stack . push ( new mapping_file_scope ( ) ) ; DSLMapping mapping = null ; mapping_file_stack . peek ( ) . retval = new DefaultDSLMapping ( ) ; try { // src / main / resources / org / drools / compiler / lang / dsl / DSLMapWalker . g : 23:5 : ( ^ ( VT _ DSL _ GRAMMAR ( valid _ entry ) * ) ) // src / main / resources / org / drools / compiler / lang / dsl / DSLMapWalker . g : 23:7 : ^ ( VT _ DSL _ GRAMMAR ( valid _ entry ) * ) { match ( input , VT_DSL_GRAMMAR , FOLLOW_VT_DSL_GRAMMAR_in_mapping_file63 ) ; if ( input . LA ( 1 ) == Token . DOWN ) { match ( input , Token . DOWN , null ) ; // src / main / resources / org / drools / compiler / lang / dsl / DSLMapWalker . g : 23:24 : ( valid _ entry ) * loop1 : while ( true ) { int alt1 = 2 ; int LA1_0 = input . LA ( 1 ) ; if ( ( LA1_0 == VT_ENTRY ) ) { alt1 = 1 ; } switch ( alt1 ) { case 1 : // src / main / resources / org / drools / compiler / lang / dsl / DSLMapWalker . g : 23:24 : valid _ entry { pushFollow ( FOLLOW_valid_entry_in_mapping_file65 ) ; valid_entry ( ) ; state . _fsp -- ; } break ; default : break loop1 ; } } match ( input , Token . UP , null ) ; } mapping = mapping_file_stack . peek ( ) . retval ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving mapping_file_stack . pop ( ) ; } return mapping ;
public class JsonSchema { /** * Set properties for { @ link JsonSchema } from a { @ link JsonObject } . * @ param jsonObject */ private void setJsonSchemaProperties ( JsonObject jsonObject ) { } }
setColumnName ( jsonObject . get ( COLUMN_NAME_KEY ) . getAsString ( ) ) ; setDataType ( jsonObject . get ( DATA_TYPE_KEY ) . getAsJsonObject ( ) ) ; setNullable ( jsonObject . has ( IS_NULLABLE_KEY ) && jsonObject . get ( IS_NULLABLE_KEY ) . getAsBoolean ( ) ) ; setComment ( getOptionalProperty ( jsonObject , COMMENT_KEY ) ) ; setDefaultValue ( getOptionalProperty ( jsonObject , DEFAULT_VALUE_KEY ) ) ;
public class ClassPathResource { /** * Resolves a URL for the underlying class path resource . * @ return the resolved URL , or { @ code null } if not resolvable */ protected URL resolveURL ( ) { } }
if ( this . clazz != null ) { return this . clazz . getResource ( this . path ) ; } else if ( this . classLoader != null ) { return this . classLoader . getResource ( this . path ) ; } else { return ClassLoader . getSystemResource ( this . path ) ; }
public class FirstNonNullHelper { /** * Gets first result of set of function which is not null . * @ param < T > type of values . * @ param < R > element to return . * @ param input input to provide to all functions . * @ param function first function to apply ( separate to indicate at least one should be provided ) * @ param functions all possible functions that might be able to supply a value . * @ return first result which was not null , * OR < code > null < / code > if result for each function ' s results was < code > null < / code > . */ public static < T , R > R firstNonNull ( T input , Function < T , R > function , Function < T , R > ... functions ) { } }
return firstNonNull ( f -> f . apply ( input ) , Stream . concat ( Stream . of ( function ) , Stream . of ( functions ) ) ) ;
public class KamDialect { /** * { @ inheritDoc } */ @ Override public Collection < KamNode > getNodes ( NodeFilter filter ) { } }
return wrapNodes ( kam . getNodes ( filter ) ) ;
public class EvolutionStart { /** * Create a new evolution start object with the given population and for the * given generation . * @ param < G > the gene type * @ param < C > the fitness type * @ param population the start population . * @ param generation the start generation of the population * @ return a new evolution start object * @ throws java . lang . NullPointerException if the given { @ code population } is * { @ code null } . * @ throws IllegalArgumentException if the given { @ code generation } is * smaller then one */ public static < G extends Gene < ? , G > , C extends Comparable < ? super C > > EvolutionStart < G , C > of ( final ISeq < Phenotype < G , C > > population , final long generation ) { } }
return new EvolutionStart < > ( population , generation ) ;
public class PrefixedProperties { /** * Gets the prefixed key and parse it to an String [ ] < br > * Each comma - separated list can be used . * @ param key * the key * @ return String [ ] or null if the key couldn ' t get found . */ public String [ ] getArray ( final String key ) { } }
final String value = getProperty ( key ) ; if ( value != null ) { final String [ ] strings = value . split ( ",[\\s]*|[\\s]*$" ) ; return strings ; } return null ;
public class ParticipantReader { /** * Add the requested query string arguments to the Request . * @ param request Request to add query string arguments to */ private void addQueryParams ( final Request request ) { } }
if ( muted != null ) { request . addQueryParam ( "Muted" , muted . toString ( ) ) ; } if ( hold != null ) { request . addQueryParam ( "Hold" , hold . toString ( ) ) ; } if ( coaching != null ) { request . addQueryParam ( "Coaching" , coaching . toString ( ) ) ; } if ( getPageSize ( ) != null ) { request . addQueryParam ( "PageSize" , Integer . toString ( getPageSize ( ) ) ) ; }
public class PluralFormat { /** * Formats a plural message for a given number and appends the formatted * message to the given < code > StringBuffer < / code > . * @ param number a number object ( instance of < code > Number < / code > for which * the plural message should be formatted . If no pattern has been * applied to this < code > PluralFormat < / code > object yet , the * formatted number will be returned . * Note : If this object is not an instance of < code > Number < / code > , * the < code > toAppendTo < / code > will not be modified . * @ param toAppendTo the formatted message will be appended to this * < code > StringBuffer < / code > . * @ param pos will be ignored by this method . * @ return the string buffer passed in as toAppendTo , with formatted text * appended . * @ throws IllegalArgumentException if number is not an instance of Number */ @ Override public StringBuffer format ( Object number , StringBuffer toAppendTo , FieldPosition pos ) { } }
if ( ! ( number instanceof Number ) ) { throw new IllegalArgumentException ( "'" + number + "' is not a Number" ) ; } Number numberObject = ( Number ) number ; toAppendTo . append ( format ( numberObject , numberObject . doubleValue ( ) ) ) ; return toAppendTo ;
public class CmsCroppingDialog { /** * Handles the click event for ok button . Sets the selected cropping parameters . < p > * @ param event the click event */ @ UiHandler ( "m_okButton" ) protected void onOk ( ClickEvent event ) { } }
if ( ! ( ( m_croppingParam . getTargetWidth ( ) > 0 ) && ( m_croppingParam . getTargetHeight ( ) > 0 ) ) ) { if ( m_croppingParam . getTargetWidth ( ) > 0 ) { m_croppingParam . setTargetHeight ( ( int ) Math . floor ( ( 1.00 * m_croppingParam . getTargetWidth ( ) * m_croppingParam . getCropHeight ( ) ) / m_croppingParam . getCropWidth ( ) ) ) ; } else if ( m_croppingParam . getTargetHeight ( ) > 0 ) { m_croppingParam . setTargetWidth ( ( int ) Math . floor ( ( 1.00 * m_croppingParam . getTargetHeight ( ) * m_croppingParam . getCropWidth ( ) ) / m_croppingParam . getCropHeight ( ) ) ) ; } else { m_croppingParam . setTargetHeight ( m_croppingParam . getCropHeight ( ) ) ; m_croppingParam . setTargetWidth ( m_croppingParam . getCropWidth ( ) ) ; } } ValueChangeEvent . fire ( this , m_croppingParam ) ; hide ( ) ;
public class PersistentState { /** * Read and return the state of parsing for a particular log file . * @ param fname the log file for which to read the state */ public static ParseState getState ( String fname ) { } }
String [ ] fields = persData . getProperty ( fname , "null" + SEPARATOR + "0" ) . split ( SEPARATOR , 2 ) ; String firstLine ; long offset ; if ( fields . length < 2 ) { System . err . println ( "Malformed persistent state data found" ) ; Environment . logInfo ( "Malformed persistent state data found" ) ; firstLine = null ; offset = 0 ; } else { firstLine = ( fields [ 0 ] . equals ( "null" ) ? null : fields [ 0 ] ) ; offset = Long . parseLong ( fields [ 1 ] ) ; } return new ParseState ( fname , firstLine , offset ) ;
public class Kafka { /** * Configures to start reading partitions from specific offsets and specifies the given offset for * the given partition . * @ param partition partition index * @ param specificOffset partition offset to start reading from * @ see FlinkKafkaConsumerBase # setStartFromSpecificOffsets ( Map ) */ public Kafka startFromSpecificOffset ( int partition , long specificOffset ) { } }
this . startupMode = StartupMode . SPECIFIC_OFFSETS ; if ( this . specificOffsets == null ) { this . specificOffsets = new HashMap < > ( ) ; } this . specificOffsets . put ( partition , specificOffset ) ; return this ;
public class PutConfigurationAggregatorRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( PutConfigurationAggregatorRequest putConfigurationAggregatorRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( putConfigurationAggregatorRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( putConfigurationAggregatorRequest . getConfigurationAggregatorName ( ) , CONFIGURATIONAGGREGATORNAME_BINDING ) ; protocolMarshaller . marshall ( putConfigurationAggregatorRequest . getAccountAggregationSources ( ) , ACCOUNTAGGREGATIONSOURCES_BINDING ) ; protocolMarshaller . marshall ( putConfigurationAggregatorRequest . getOrganizationAggregationSource ( ) , ORGANIZATIONAGGREGATIONSOURCE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FuncNumber { /** * Execute the function . The function must return * a valid object . * @ param xctxt The current execution context . * @ return A valid XObject . * @ throws javax . xml . transform . TransformerException */ public XObject execute ( XPathContext xctxt ) throws javax . xml . transform . TransformerException { } }
return new XNumber ( getArg0AsNumber ( xctxt ) ) ;
public class SelectResultSet { /** * Grow data array . */ private void growDataArray ( ) { } }
int newCapacity = data . length + ( data . length >> 1 ) ; if ( newCapacity - MAX_ARRAY_SIZE > 0 ) { newCapacity = MAX_ARRAY_SIZE ; } data = Arrays . copyOf ( data , newCapacity ) ;
public class AutomationAccountsInner { /** * Update an automation account . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ param parameters Parameters supplied to the update automation account . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the AutomationAccountInner object if successful . */ public AutomationAccountInner update ( String resourceGroupName , String automationAccountName , AutomationAccountUpdateParameters parameters ) { } }
return updateWithServiceResponseAsync ( resourceGroupName , automationAccountName , parameters ) . toBlocking ( ) . single ( ) . body ( ) ;
public class AJP13Packet { public void write ( OutputStream out ) throws IOException { } }
if ( log . isTraceEnabled ( ) ) log . trace ( "AJP13 snd: " + this . toString ( 64 ) ) ; // System . err . println ( Thread . currentThread ( ) + " AJP13 snd // " + this . toString ( ) ) ; out . write ( _buf , 0 , _bytes ) ;
public class FileUtil { /** * public static final int NAMECONFLICT _ CLOSURE = 32 ; / / FUTURE */ public static int toNameConflict ( String nameConflict ) throws ApplicationException { } }
if ( StringUtil . isEmpty ( nameConflict , true ) ) return NAMECONFLICT_UNDEFINED ; nameConflict = nameConflict . trim ( ) . toLowerCase ( ) ; if ( "error" . equals ( nameConflict ) ) return NAMECONFLICT_ERROR ; if ( "skip" . equals ( nameConflict ) || "ignore" . equals ( nameConflict ) ) return NAMECONFLICT_SKIP ; if ( "merge" . equals ( nameConflict ) || "overwrite" . equals ( nameConflict ) ) return NAMECONFLICT_OVERWRITE ; if ( "makeunique" . equals ( nameConflict ) || "unique" . equals ( nameConflict ) ) return NAMECONFLICT_MAKEUNIQUE ; throw new ApplicationException ( "Invalid value for attribute nameConflict [" + nameConflict + "]" , "valid values are [" + fromNameConflictBitMask ( Integer . MAX_VALUE ) + "]" ) ;
public class FSA5 { /** * Returns the number encoded at the given node . The number equals the count * of the set of suffixes reachable from < code > node < / code > ( called its right * language ) . */ @ Override public int getRightLanguageCount ( int node ) { } }
assert getFlags ( ) . contains ( FSAFlags . NUMBERS ) : "This FSA was not compiled with NUMBERS." ; return decodeFromBytes ( arcs , node , nodeDataLength ) ;
public class TopScreen { /** * Make sure I am allowed access to this screen . * @ param strClassResource * @ return */ public ScreenModel checkSecurity ( ScreenModel screen , ScreenModel parentScreen ) { } }
int iErrorCode = DBConstants . NORMAL_RETURN ; if ( screen != null ) iErrorCode = ( ( BaseScreen ) screen ) . checkSecurity ( ) ; if ( iErrorCode == Constants . READ_ACCESS ) { ( ( BaseScreen ) screen ) . setEditing ( false ) ; ( ( BaseScreen ) screen ) . setAppending ( false ) ; iErrorCode = DBConstants . NORMAL_RETURN ; } if ( iErrorCode == DBConstants . NORMAL_RETURN ) return screen ; // Good , access allowed else { if ( screen != null ) screen . free ( ) ; return this . getSecurityScreen ( iErrorCode , ( BasePanel ) parentScreen ) ; // Create and return the login or error screen }
public class ConfigWebUtil { /** * generate a file object by the string definition * @ param rootDir * @ param strDir * @ param defaultDir * @ param configDir * @ param type * @ param config * @ return file */ static Resource getFile ( Resource rootDir , String strDir , String defaultDir , Resource configDir , short type , ConfigImpl config ) { } }
strDir = replacePlaceholder ( strDir , config ) ; if ( ! StringUtil . isEmpty ( strDir , true ) ) { Resource res ; if ( strDir . indexOf ( "://" ) != - 1 ) { // TODO better impl . res = getFile ( config . getResource ( strDir ) , type ) ; if ( res != null ) return res ; } res = rootDir == null ? null : getFile ( rootDir . getRealResource ( strDir ) , type ) ; if ( res != null ) return res ; res = getFile ( config . getResource ( strDir ) , type ) ; if ( res != null ) return res ; } if ( defaultDir == null ) return null ; Resource file = getFile ( configDir . getRealResource ( defaultDir ) , type ) ; return file ;
public class Optional { /** * If a value is present , invoke the specified consumer with the value , otherwise do nothing . * @ param consumer consumer to be invoked if present . * @ return this Optional * @ since 1.7.8 */ public Optional < T > ifPresent ( final Consumer < ? super T > consumer ) { } }
if ( isPresent ( ) ) { consumer . accept ( get ( ) ) ; } return this ;
public class DoubleTuples { /** * Randomize the given tuple with a gaussian * distribution with a mean of 0.0 and standard deviation of 1.0 * @ param t The tuple to fill * @ param random The random number generator */ public static void randomizeGaussian ( MutableDoubleTuple t , Random random ) { } }
for ( int i = 0 ; i < t . getSize ( ) ; i ++ ) { double value = random . nextGaussian ( ) ; t . set ( i , value ) ; }
public class HelloSignClient { /** * Instructs HelloSign to email the given address with a reminder to sign * the SignatureRequest referenced by the given request ID . * Note : You cannot send a reminder within 1 hours of the last reminder that * was sent , manually or automatically . * @ param requestId String SignatureRequest ID * @ param email String email * @ return SignatureRequest The request to be reminded * @ throws HelloSignException thrown if there ' s a problem processing the * HTTP request or the JSON response . */ public SignatureRequest requestEmailReminder ( String requestId , String email ) throws HelloSignException { } }
String url = BASE_URI + SIGNATURE_REQUEST_REMIND_URI + "/" + requestId ; return new SignatureRequest ( httpClient . withAuth ( auth ) . withPostField ( Account . ACCOUNT_EMAIL_ADDRESS , email ) . post ( url ) . asJson ( ) ) ;
public class BeanUtils { /** * Set all properties of the target bean to the values obtained * from the source bean . < br > * < br > * This method will extract all properties of the target bean class * using { @ link # getMutablePropertyNamesOptional ( Class ) } . For each * property , the corresponding read - method of the source bean class * will be called to obtain the value . This value will then be passed * to the target bean by calling the write - method for the property . < br > * < br > * Any checked exception that may be thrown internally will silently * be ignored . * @ param targetBean The target bean * @ param sourceBean The source bean */ public static void setAllOptional ( Object targetBean , Object sourceBean ) { } }
Class < ? > sourceClass = targetBean . getClass ( ) ; Class < ? > targetClass = targetBean . getClass ( ) ; List < String > propertyNames = getMutablePropertyNamesOptional ( targetClass ) ; for ( String propertyName : propertyNames ) { Method readMethod = getReadMethodOptional ( sourceClass , propertyName ) ; Method writeMethod = getWriteMethodOptional ( targetClass , propertyName ) ; if ( readMethod != null && writeMethod != null ) { try { Object value = readMethod . invoke ( sourceBean ) ; Methods . invokeOptional ( writeMethod , targetBean , value ) ; } catch ( IllegalArgumentException e ) { // Ignore } catch ( IllegalAccessException e ) { // Ignore } catch ( InvocationTargetException e ) { // Ignore } catch ( SecurityException e ) { // Ignore } } }
public class CodeGenerator { /** * generate access { @ link Field } value source code . support public field access , getter method access and reflection * access . * @ param target * @ param field * @ param cls * @ param express * @ param isList * @ return */ protected String getSetToField ( String target , Field field , Class < ? > cls , String express , boolean isList , boolean isMap , boolean packed ) { } }
StringBuilder ret = new StringBuilder ( ) ; if ( isList || isMap ) { ret . append ( "if ((" ) . append ( getAccessByField ( target , field , cls ) ) . append ( ") == null) {" ) . append ( LINE_BREAK ) ; } // if field of public modifier we can access directly if ( Modifier . isPublic ( field . getModifiers ( ) ) ) { if ( isList ) { // should initialize list ret . append ( target ) . append ( ClassHelper . PACKAGE_SEPARATOR ) . append ( field . getName ( ) ) . append ( "= new ArrayList()" ) . append ( JAVA_LINE_BREAK ) . append ( "}" ) . append ( LINE_BREAK ) ; if ( express != null ) { if ( packed ) { ret . append ( "while (input.getBytesUntilLimit() > 0) {" ) . append ( LINE_BREAK ) ; } ret . append ( target ) . append ( ClassHelper . PACKAGE_SEPARATOR ) . append ( field . getName ( ) ) . append ( ".add(" ) . append ( express ) . append ( ")" ) ; if ( packed ) { ret . append ( ";}" ) . append ( LINE_BREAK ) ; } } return ret . toString ( ) ; } else if ( isMap ) { ret . append ( target ) . append ( ClassHelper . PACKAGE_SEPARATOR ) . append ( field . getName ( ) ) . append ( "= new HashMap()" ) . append ( JAVA_LINE_BREAK ) . append ( "}" ) . append ( LINE_BREAK ) ; return ret . append ( express ) . toString ( ) ; } return target + ClassHelper . PACKAGE_SEPARATOR + field . getName ( ) + "=" + express + LINE_BREAK ; } String setter = "set" + CodedConstant . capitalize ( field . getName ( ) ) ; // check method exist try { cls . getMethod ( setter , new Class < ? > [ ] { field . getType ( ) } ) ; if ( isList ) { ret . append ( "List __list = new ArrayList()" ) . append ( JAVA_LINE_BREAK ) ; ret . append ( target ) . append ( ClassHelper . PACKAGE_SEPARATOR ) . append ( setter ) . append ( "(__list)" ) . append ( JAVA_LINE_BREAK ) . append ( "}" ) . append ( LINE_BREAK ) ; if ( express != null ) { if ( packed ) { ret . append ( "while (input.getBytesUntilLimit() > 0) {" ) . append ( LINE_BREAK ) ; } ret . append ( "(" ) . 
append ( getAccessByField ( target , field , cls ) ) . append ( ").add(" ) . append ( express ) . append ( ")" ) ; if ( packed ) { ret . append ( ";}" ) . append ( LINE_BREAK ) ; } } return ret . toString ( ) ; } else if ( isMap ) { ret . append ( "Map __map = new HashMap()" ) . append ( JAVA_LINE_BREAK ) ; ret . append ( target ) . append ( ClassHelper . PACKAGE_SEPARATOR ) . append ( setter ) . append ( "(__map)" ) . append ( JAVA_LINE_BREAK ) . append ( "}" ) . append ( LINE_BREAK ) ; return ret + express ; } return target + ClassHelper . PACKAGE_SEPARATOR + setter + "(" + express + ")\n" ; } catch ( Exception e ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( e . getMessage ( ) , e ) ; } } if ( isList ) { ret . append ( "List __list = new ArrayList()" ) . append ( JAVA_LINE_BREAK ) ; ret . append ( "FieldUtils.setField(" ) . append ( target ) . append ( ", \"" ) . append ( field . getName ( ) ) . append ( "\", __list)" ) . append ( JAVA_LINE_BREAK ) . append ( "}" ) . append ( LINE_BREAK ) ; if ( express != null ) { if ( packed ) { ret . append ( "while (input.getBytesUntilLimit() > 0) {" ) . append ( LINE_BREAK ) ; } ret . append ( "(" ) . append ( getAccessByField ( target , field , cls ) ) . append ( ").add(" ) . append ( express ) . append ( ")" ) ; if ( packed ) { ret . append ( ";}" ) . append ( LINE_BREAK ) ; } } return ret . toString ( ) ; } else if ( isMap ) { ret . append ( "Map __map = new HashMap()" ) . append ( JAVA_LINE_BREAK ) ; ret . append ( "FieldUtils.setField(" ) . append ( target ) . append ( ", \"" ) . append ( field . getName ( ) ) . append ( "\", __map)" ) . append ( JAVA_LINE_BREAK ) . append ( "}" ) . append ( LINE_BREAK ) ; return ret + express ; } // use reflection to get value String code = "" ; if ( express != null ) { code = "FieldUtils.setField(" + target + ", \"" + field . getName ( ) + "\", " + express + ")" + LINE_BREAK ; } return code ;
public class CPOptionValuePersistenceImpl { /** * Returns the last cp option value in the ordered set where CPOptionId = & # 63 ; . * @ param CPOptionId the cp option ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp option value , or < code > null < / code > if a matching cp option value could not be found */ @ Override public CPOptionValue fetchByCPOptionId_Last ( long CPOptionId , OrderByComparator < CPOptionValue > orderByComparator ) { } }
int count = countByCPOptionId ( CPOptionId ) ; if ( count == 0 ) { return null ; } List < CPOptionValue > list = findByCPOptionId ( CPOptionId , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class QueryCacheEndToEndProvider { /** * Idempotent query cache create mechanism . */ public InternalQueryCache < K , V > tryCreateQueryCache ( String mapName , String cacheName , ConstructorFunction < String , InternalQueryCache < K , V > > constructor ) { } }
ContextMutexFactory . Mutex mutex = lifecycleMutexFactory . mutexFor ( mapName ) ; try { synchronized ( mutex ) { ConcurrentMap < String , InternalQueryCache < K , V > > queryCacheRegistry = getOrPutIfAbsent ( queryCacheRegistryPerMap , mapName , queryCacheRegistryConstructor ) ; InternalQueryCache < K , V > queryCache = queryCacheRegistry . get ( cacheName ) ; // if this is a recreation we expect to have a Uuid otherwise we // need to generate one for the first creation of query cache . String cacheId = queryCache == null ? UuidUtil . newUnsecureUuidString ( ) : queryCache . getCacheId ( ) ; queryCache = constructor . createNew ( cacheId ) ; if ( queryCache != NULL_QUERY_CACHE ) { queryCacheRegistry . put ( cacheName , queryCache ) ; return queryCache ; } return null ; } } finally { closeResource ( mutex ) ; }
public class AbstractParser { /** * convert an xml attribute in boolean value . Empty elements results in default value * @ param reader the StAX reader * @ param attributeName the name of the attribute * @ param defaultValue defaultValue * @ param expressions The expressions * @ return the boolean representing element * @ throws XMLStreamException StAX exception * @ throws ParserException in case of not valid boolean for given attribute */ protected Boolean attributeAsBoolean ( XMLStreamReader reader , String attributeName , Boolean defaultValue , Map < String , String > expressions ) throws XMLStreamException , ParserException { } }
String attributeString = rawAttributeText ( reader , attributeName ) ; if ( attributeName != null && expressions != null && attributeString != null && attributeString . indexOf ( "${" ) != - 1 ) expressions . put ( attributeName , attributeString ) ; String stringValue = getSubstitutionValue ( attributeString ) ; if ( StringUtils . isEmpty ( stringValue ) || stringValue . trim ( ) . equalsIgnoreCase ( "true" ) || stringValue . trim ( ) . equalsIgnoreCase ( "false" ) ) { return StringUtils . isEmpty ( stringValue ) ? defaultValue : Boolean . valueOf ( stringValue . trim ( ) ) ; } else { throw new ParserException ( bundle . attributeAsBoolean ( attributeString , reader . getLocalName ( ) ) ) ; }
public class StreamLoader { /** * Initializes queues */ private void initQueues ( ) { } }
LOGGER . debug ( "Init Queues" ) ; if ( _active . getAndSet ( true ) ) { // NOP if the loader is already active return ; } // start PUT and PROCESS queues _queuePut = new ArrayBlockingQueue < > ( 48 ) ; _queueProcess = new ArrayBlockingQueue < > ( 48 ) ; _put = new PutQueue ( this ) ; _process = new ProcessQueue ( this ) ; // Start queue . NOTE : This is not actively used _queueData = new ArrayBlockingQueue < > ( 1024 ) ; _thread = new Thread ( this ) ; _thread . setName ( "StreamLoaderThread" ) ; _thread . start ( ) ; // Create stage _stage = new BufferStage ( this , _op , _csvFileBucketSize , _csvFileSize ) ;
public class CmsDriverManager { /** * Reads all resources that have a value ( containing the given value string ) set * for the specified property ( definition ) in the given path . < p > * Both individual and shared properties of a resource are checked . < p > * If the < code > value < / code > parameter is < code > null < / code > , all resources having the * given property set are returned . < p > * @ param dbc the current database context * @ param folder the folder to get the resources with the property from * @ param propertyDefinition the name of the property ( definition ) to check for * @ param value the string to search in the value of the property * @ param filter the resource filter to apply to the result set * @ return a list of all < code > { @ link CmsResource } < / code > objects * that have a value set for the specified property . * @ throws CmsException if something goes wrong */ public List < CmsResource > readResourcesWithProperty ( CmsDbContext dbc , CmsResource folder , String propertyDefinition , String value , CmsResourceFilter filter ) throws CmsException { } }
String cacheKey ; if ( value == null ) { cacheKey = getCacheKey ( new String [ ] { dbc . currentUser ( ) . getName ( ) , folder . getRootPath ( ) , propertyDefinition , filter . getCacheId ( ) } , dbc ) ; } else { cacheKey = getCacheKey ( new String [ ] { dbc . currentUser ( ) . getName ( ) , folder . getRootPath ( ) , propertyDefinition , value , filter . getCacheId ( ) } , dbc ) ; } List < CmsResource > resourceList = m_monitor . getCachedResourceList ( cacheKey ) ; if ( ( resourceList == null ) || ! dbc . getProjectId ( ) . isNullUUID ( ) ) { // first read the property definition CmsPropertyDefinition propDef = readPropertyDefinition ( dbc , propertyDefinition ) ; // now read the list of resources that have a value set for the property definition resourceList = getVfsDriver ( dbc ) . readResourcesWithProperty ( dbc , dbc . currentProject ( ) . getUuid ( ) , propDef . getId ( ) , folder . getRootPath ( ) , value ) ; // apply permission filter resourceList = filterPermissions ( dbc , resourceList , filter ) ; // store the result in the resourceList cache if ( dbc . getProjectId ( ) . isNullUUID ( ) ) { m_monitor . cacheResourceList ( cacheKey , resourceList ) ; } } // we must always apply the result filter and update the context dates return updateContextDates ( dbc , resourceList , filter ) ;
public class Image { /** * This function is to get image ' s width . * @ return the image ' s width * @ throws WebElementException */ public int getWidth ( ) { } }
try { return ( ( RemoteWebElement ) getElement ( ) ) . getSize ( ) . width ; } catch ( NumberFormatException e ) { throw new WebElementException ( "Attribute " + WIDTH + " not found for Image " + getLocator ( ) , e ) ; }
public class SerializerConfig { /** * Sets the class of the serializer implementation . * @ param clazz the set class of the serializer implementation * @ return SerializerConfig */ public SerializerConfig setClass ( final Class < ? extends Serializer > clazz ) { } }
String className = clazz == null ? null : clazz . getName ( ) ; return setClassName ( className ) ;
public class CmsLinkManager { /** * Returns a link < i > from < / i > the URI stored in the provided OpenCms user context * < i > to < / i > the VFS resource indicated by the given < code > link < / code > and < code > siteRoot < / code > , * for use on web pages , using the configured link substitution handler . < p > * The result will be an absolute link that contains the configured context path and * servlet name , and in the case of the " online " project it will also be rewritten according to * to the configured static export settings . < p > * In case < code > link < / code > is a relative URI , the current URI contained in the provided * OpenCms user context < code > cms < / code > is used to make the relative < code > link < / code > absolute . < p > * The provided < code > siteRoot < / code > is assumed to be the " home " of the link . * In case the current site of the given OpenCms user context < code > cms < / code > is different from the * provided < code > siteRoot < / code > , the full server prefix is appended to the result link . < p > * A server prefix is also added if * < ul > * < li > the link is contained in a normal document and the link references a secure document < / li > * < li > the link is contained in a secure document and the link references a normal document < / li > * < / ul > * Please note the above text describes the default behavior as implemented by * { @ link CmsDefaultLinkSubstitutionHandler } , which can be fully customized using the * { @ link I _ CmsLinkSubstitutionHandler } interface . 
< p > * @ param cms the current OpenCms user context * @ param link the link to process which is assumed to point to a VFS resource , with optional parameters * @ param siteRoot the site root of the < code > link < / code > * @ param targetDetailPage the target detail page , in case of linking to a specific detail page * @ param forceSecure if < code > true < / code > generates always an absolute URL ( with protocol and server name ) for secure links * @ return a link < i > from < / i > the URI stored in the provided OpenCms user context * < i > to < / i > the VFS resource indicated by the given < code > link < / code > and < code > siteRoot < / code > * @ see I _ CmsLinkSubstitutionHandler for the interface that can be used to fully customize the link substitution * @ see CmsDefaultLinkSubstitutionHandler for the default link substitution handler */ public String substituteLink ( CmsObject cms , String link , String siteRoot , String targetDetailPage , boolean forceSecure ) { } }
if ( targetDetailPage != null ) { return m_linkSubstitutionHandler . getLink ( cms , link , siteRoot , targetDetailPage , forceSecure ) ; } else { return m_linkSubstitutionHandler . getLink ( cms , link , siteRoot , forceSecure ) ; }
public class WebSocketFrame { /** * Parse the third and subsequent bytes of the payload as a close reason . * If any payload is not set or the length of the payload is less than 3, * this method returns { @ code null } . * The value returned from this method is meaningless if this frame * is not a close frame . * @ return * The close reason . */ public String getCloseReason ( ) { } }
if ( mPayload == null || mPayload . length < 3 ) { return null ; } return Misc . toStringUTF8 ( mPayload , 2 , mPayload . length - 2 ) ;
public class Token { /** * Return the Tag that matches the given class */ @ SuppressWarnings ( "unchecked" ) public < T extends Tag < ? > > List < T > getTags ( Class < T > tagClass ) { } }
List < T > matches = new LinkedList < T > ( ) ; Iterator < Tag < ? > > tagIter = _tags . iterator ( ) ; while ( tagIter . hasNext ( ) ) { Tag < ? > tag = tagIter . next ( ) ; if ( tagClass . isInstance ( tag ) ) { matches . add ( ( T ) tag ) ; } } return matches ;
public class PositionTracker { /** * Closes the current changing run by Merging new position changes into the existing position change map * after each round ( one round = consecutive changes along the text ) you need to call closeRun ( ) before submitting more position changes from a new round , * i . e . whenever you passed the string to be modified once call closeRun ( ) before starting to run over the string again with more replacements * Do this every time you ran once over the text making changes to be tracked */ public void closeRun ( ) { } }
// Merges the position changes collected in this run into the accumulated map, then swaps the
// two maps so the next run starts with an empty change set.
// Steps, as visible below:
//  1. Early exit when no changes were recorded in this run.
//  2. For each entry in positionMap, shift its key by the sum of all old2NewMap offsets at or
//     before that key (the empty-bodied for-loop only accumulates `newposition` in its update
//     clause — intentional, not a bug), add its value to any existing entry at the shifted key
//     in positionChanges, and remove it from positionMap.
//  3. Swap the (now empty) positionMap with the merged positionChanges via `temp`, clear the
//     per-run offset map and reset accumulatedModifier for the next round.
// NOTE(review): the exact semantics of old2NewMap/accumulatedModifier depend on code outside
// this view — confirm against the rest of the class before modifying.
if ( positionChanges . isEmpty ( ) ) return ; SortedMap < Integer , Integer > temp = positionChanges ; // adapt old positions to new mapping while ( ! positionMap . isEmpty ( ) ) { Integer key = positionMap . firstKey ( ) ; Collection < Integer > modifiers = old2NewMap . headMap ( key + 1 ) . values ( ) ; Integer newposition = key ; for ( Iterator < Integer > it = modifiers . iterator ( ) ; it . hasNext ( ) ; newposition += it . next ( ) ) { } Integer value = positionMap . get ( key ) ; if ( positionChanges . containsKey ( newposition ) ) value += positionChanges . get ( newposition ) ; positionChanges . put ( newposition , value ) ; positionMap . remove ( key ) ; } positionChanges = positionMap ; positionMap = temp ; old2NewMap . clear ( ) ; accumulatedModifier = 0 ; return ;
public class CPIImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
// EMF-generated feature check: the RG feature is "set" when its list is non-null and
// non-empty; all other feature ids are delegated to the superclass. Do not hand-edit
// (@generated) — regenerate from the model instead.
switch ( featureID ) { case AfplibPackage . CPI__RG : return rg != null && ! rg . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
public class Redirect { /** * Redirects any HTTP request of type POST on ' fromPath ' to ' toPath ' with the provided redirect ' status ' code . * @ param fromPath from path * @ param toPath to path * @ param status status code */ public void post ( String fromPath , String toPath , Status status ) { } }
http . post ( fromPath , redirectRoute ( toPath , status ) ) ;
public class FindPositionArray { /** * Returns the position for a given number of occurrences or NOT _ FOUND if * this value is not found . * @ param nOccurrence * number of occurrences * @ return the position for a given number of occurrences or NOT _ FOUND if * this value is not found */ public long findPosition ( long nOccurrence ) { } }
updateCount ( ) ; if ( nOccurrence <= 0 ) { return RankedBitVector . NOT_FOUND ; } int findPos = ( int ) ( nOccurrence / this . blockSize ) ; if ( findPos < this . positionArray . length ) { long pos0 = this . positionArray [ findPos ] ; long leftOccurrences = nOccurrence - ( findPos * this . blockSize ) ; if ( leftOccurrences == 0 ) { return pos0 ; } for ( long index = pos0 + 1 ; index < this . bitVector . size ( ) ; index ++ ) { if ( this . bitVector . getBit ( index ) == this . bit ) { leftOccurrences -- ; } if ( leftOccurrences == 0 ) { return index ; } } } return RankedBitVector . NOT_FOUND ;
public class Vector3i { /** * Set the first two components from the given < code > v < / code > and the z * component from the given < code > z < / code > * @ param v * the { @ link Vector2ic } to copy the values from * @ param z * the z component * @ return this */ public Vector3i set ( Vector2ic v , int z ) { } }
return set ( v . x ( ) , v . y ( ) , z ) ;
public class GeometryUtilities { /** * Calculates the area of a polygon from its vertices . * @ param x the array of x coordinates . * @ param y the array of y coordinates . * @ param N the number of sides of the polygon . * @ return the area of the polygon . */ public static double getPolygonArea ( int [ ] x , int [ ] y , int N ) { } }
int i , j ; double area = 0 ; for ( i = 0 ; i < N ; i ++ ) { j = ( i + 1 ) % N ; area += x [ i ] * y [ j ] ; area -= y [ i ] * x [ j ] ; } area /= 2 ; return ( area < 0 ? - area : area ) ;
public class PlatformServicesImpl { /** * ( non - Javadoc ) * @ see org . osgi . framework . BundleContext # registerService ( String clazz , * Object service , Dictionary properties ) */ @ Override public IServiceRegistration registerService ( String clazz , Object service , Dictionary < String , String > properties ) { } }
final String sourceMethod = "registerService" ; // $ NON - NLS - 1 $ boolean isTraceLogging = log . isLoggable ( Level . FINER ) ; if ( isTraceLogging ) { log . entering ( PlatformServicesImpl . class . getName ( ) , sourceMethod , new Object [ ] { clazz , service , properties } ) ; } ServiceRegistrationOSGi serviceRegistrationOSGi = null ; ServiceRegistration < ? > serviceRegistration = null ; BundleContext bundleContext = Activator . getBundleContext ( ) ; if ( bundleContext != null ) { serviceRegistration = bundleContext . registerService ( clazz , service , properties ) ; serviceRegistrationOSGi = new ServiceRegistrationOSGi ( serviceRegistration ) ; } if ( isTraceLogging ) { log . exiting ( PlatformServicesImpl . class . getName ( ) , sourceMethod , serviceRegistrationOSGi ) ; } return serviceRegistrationOSGi ;
public class ChatController { /** * Handles conversation delete service response . * @ param conversationId Unique identifier of an conversation . * @ param result Service call response . * @ return Observable emitting result of operations . */ Observable < ChatResult > handleConversationDeleted ( String conversationId , ComapiResult < Void > result ) { } }
// Two paths:
//  - Any response code other than ETAG_NOT_VALID: delete the conversation from the local
//    store and adapt the original service result.
//  - ETAG_NOT_VALID (stale eTag): re-fetch the conversation from the service, upsert the
//    fresh copy into the local store, and emit a non-success ChatResult telling the caller
//    to retry the delete; a failed re-fetch is adapted and emitted as-is.
// NOTE(review): the "success ? ... : ..." branch maps a failed local upsert to error code 0
// ("Error updating custom store.") — confirm 0 is the intended sentinel for store errors.
if ( result . getCode ( ) != ETAG_NOT_VALID ) { return persistenceController . deleteConversation ( conversationId ) . map ( success -> adapter . adaptResult ( result , success ) ) ; } else { return checkState ( ) . flatMap ( client -> client . service ( ) . messaging ( ) . getConversation ( conversationId ) . flatMap ( newResult -> { if ( newResult . isSuccessful ( ) ) { return persistenceController . upsertConversation ( ChatConversation . builder ( ) . populate ( newResult . getResult ( ) , newResult . getETag ( ) ) . build ( ) ) . flatMap ( success -> Observable . fromCallable ( ( ) -> new ChatResult ( false , success ? new ChatResult . Error ( ETAG_NOT_VALID , "Conversation updated, try delete again." , "Conversation " + conversationId + " updated in response to wrong eTag error when deleting." ) : new ChatResult . Error ( 0 , "Error updating custom store." , null ) ) ) ) ; } else { return Observable . fromCallable ( ( ) -> adapter . adaptResult ( newResult ) ) ; } } ) ) ; }
public class Requirement { /** * Builds a strict requirement ( will test that the version is equivalent to the requirement ) * @ param requirement the version of the requirement * @ return the generated requirement */ public static Requirement buildStrict ( String requirement ) { } }
return build ( new Semver ( requirement , Semver . SemverType . STRICT ) ) ;
public class MathService { /** * cross - product of 2 vectors */ private static double cross ( Coordinate a1 , Coordinate a2 , Coordinate b1 , Coordinate b2 ) { } }
return ( a2 . getX ( ) - a1 . getX ( ) ) * ( b2 . getY ( ) - b1 . getY ( ) ) - ( a2 . getY ( ) - a1 . getY ( ) ) * ( b2 . getX ( ) - b1 . getX ( ) ) ;
public class AmazonElasticLoadBalancingClient { /** * Adds the specified tags to the specified Elastic Load Balancing resource . You can tag your Application Load * Balancers , Network Load Balancers , and your target groups . * Each tag consists of a key and an optional value . If a resource already has a tag with the same key , * < code > AddTags < / code > updates its value . * To list the current tags for your resources , use < a > DescribeTags < / a > . To remove tags from your resources , use * < a > RemoveTags < / a > . * @ param addTagsRequest * @ return Result of the AddTags operation returned by the service . * @ throws DuplicateTagKeysException * A tag key was specified more than once . * @ throws TooManyTagsException * You ' ve reached the limit on the number of tags per load balancer . * @ throws LoadBalancerNotFoundException * The specified load balancer does not exist . * @ throws TargetGroupNotFoundException * The specified target group does not exist . * @ sample AmazonElasticLoadBalancing . AddTags * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticloadbalancingv2-2015-12-01 / AddTags " target = " _ top " > AWS * API Documentation < / a > */ @ Override public AddTagsResult addTags ( AddTagsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAddTags ( request ) ;
public class StreamMetadataTasks { /** * Helper method to check if scale operation against an epoch completed or not . * @ param scope scope . * @ param stream stream name . * @ param epoch stream epoch . * @ param context optional context * @ return returns the newly created segments . */ public CompletableFuture < ScaleStatusResponse > checkScale ( String scope , String stream , int epoch , OperationContext context ) { } }
// Fetches the active epoch, stream state and epoch-transition record in parallel, then maps
// them to a ScaleStatusResponse:
//  - store lookup failure: DataNotFoundException -> INVALID_INPUT, anything else -> INTERNAL_ERROR
//  - requested epoch newer than the active epoch -> INVALID_INPUT
//  - requested epoch is (or references) the active epoch -> IN_PROGRESS
//  - active epoch is requested+1 but state is still SCALING with a matching/empty transition
//    record -> IN_PROGRESS (previous workflow not yet finished); otherwise SUCCESS.
// NOTE(review): allOf() waits only on stateFuture and activeEpochFuture; etrFuture.join() in
// the success branch may therefore block the handler thread or throw if that future failed —
// confirm this is intentional before changing.
CompletableFuture < EpochRecord > activeEpochFuture = streamMetadataStore . getActiveEpoch ( scope , stream , context , true , executor ) ; CompletableFuture < State > stateFuture = streamMetadataStore . getState ( scope , stream , true , context , executor ) ; CompletableFuture < EpochTransitionRecord > etrFuture = streamMetadataStore . getEpochTransition ( scope , stream , context , executor ) . thenApply ( VersionedMetadata :: getObject ) ; return CompletableFuture . allOf ( stateFuture , activeEpochFuture ) . handle ( ( r , ex ) -> { ScaleStatusResponse . Builder response = ScaleStatusResponse . newBuilder ( ) ; if ( ex != null ) { Throwable e = Exceptions . unwrap ( ex ) ; if ( e instanceof StoreException . DataNotFoundException ) { response . setStatus ( ScaleStatusResponse . ScaleStatus . INVALID_INPUT ) ; } else { response . setStatus ( ScaleStatusResponse . ScaleStatus . INTERNAL_ERROR ) ; } } else { EpochRecord activeEpoch = activeEpochFuture . join ( ) ; State state = stateFuture . join ( ) ; EpochTransitionRecord etr = etrFuture . join ( ) ; if ( epoch > activeEpoch . getEpoch ( ) ) { response . setStatus ( ScaleStatusResponse . ScaleStatus . INVALID_INPUT ) ; } else if ( activeEpoch . getEpoch ( ) == epoch || activeEpoch . getReferenceEpoch ( ) == epoch ) { response . setStatus ( ScaleStatusResponse . ScaleStatus . IN_PROGRESS ) ; } else { // active epoch = = scale epoch + 1 but the state is scaling , the previous workflow // has not completed . if ( epoch + 1 == activeEpoch . getReferenceEpoch ( ) && state . equals ( State . SCALING ) && ( etr . equals ( EpochTransitionRecord . EMPTY ) || etr . getNewEpoch ( ) == activeEpoch . getEpoch ( ) ) ) { response . setStatus ( ScaleStatusResponse . ScaleStatus . IN_PROGRESS ) ; } else { response . setStatus ( ScaleStatusResponse . ScaleStatus . SUCCESS ) ; } } } return response . build ( ) ; } ) ;
public class AbstractLIBORCovarianceModelParametric { /** * Return an instance of this model using a new set of parameters . * Note : To improve performance it is admissible to return the same instance of the object given that the parameters have not changed . Models should be immutable . * @ param parameters The new set of parameters . * @ return An instance of AbstractLIBORCovarianceModelParametric with modified parameters . */ public AbstractLIBORCovarianceModelParametric getCloneWithModifiedParameters ( RandomVariable [ ] parameters ) { } }
double [ ] parameterAsDouble = new double [ parameters . length ] ; for ( int i = 0 ; i < parameterAsDouble . length ; i ++ ) { parameterAsDouble [ i ] = parameters [ i ] . doubleValue ( ) ; } return getCloneWithModifiedParameters ( parameterAsDouble ) ;
public class AbstrStrMatcher { /** * Append the specified string to the delegate string * @ param separator * @ param join * @ return */ public M joinsWith ( String separator , Object ... join ) { } }
this . delegate = Optional . of ( joinWith ( separator , join ) ) ; return THIS ( ) ;
public class InternalXbaseLexer { /** * $ ANTLR start " RULE _ ID " */ public final void mRULE_ID ( ) throws RecognitionException { } }
// ANTLR-generated lexer rule for RULE_ID: an optional '^' escape prefix, one identifier
// start character ([a-zA-Z$_]), then zero or more identifier part characters
// ([a-zA-Z$_0-9]). Sets the token type/channel on the shared lexer state. Generated code —
// do not hand-edit; regenerate from the grammar (InternalXbase.g) instead.
try { int _type = RULE_ID ; int _channel = DEFAULT_TOKEN_CHANNEL ; // InternalXbase . g : 6355:9 : ( ( ' ^ ' ) ? ( ' a ' . . ' z ' | ' A ' . . ' Z ' | ' $ ' | ' _ ' ) ( ' a ' . . ' z ' | ' A ' . . ' Z ' | ' $ ' | ' _ ' | ' 0 ' . . ' 9 ' ) * ) // InternalXbase . g : 6355:11 : ( ' ^ ' ) ? ( ' a ' . . ' z ' | ' A ' . . ' Z ' | ' $ ' | ' _ ' ) ( ' a ' . . ' z ' | ' A ' . . ' Z ' | ' $ ' | ' _ ' | ' 0 ' . . ' 9 ' ) * { // InternalXbase . g : 6355:11 : ( ' ^ ' ) ? int alt9 = 2 ; int LA9_0 = input . LA ( 1 ) ; if ( ( LA9_0 == '^' ) ) { alt9 = 1 ; } switch ( alt9 ) { case 1 : // InternalXbase . g : 6355:11 : ' ^ ' { match ( '^' ) ; } break ; } if ( input . LA ( 1 ) == '$' || ( input . LA ( 1 ) >= 'A' && input . LA ( 1 ) <= 'Z' ) || input . LA ( 1 ) == '_' || ( input . LA ( 1 ) >= 'a' && input . LA ( 1 ) <= 'z' ) ) { input . consume ( ) ; } else { MismatchedSetException mse = new MismatchedSetException ( null , input ) ; recover ( mse ) ; throw mse ; } // InternalXbase . g : 6355:44 : ( ' a ' . . ' z ' | ' A ' . . ' Z ' | ' $ ' | ' _ ' | ' 0 ' . . ' 9 ' ) * loop10 : do { int alt10 = 2 ; int LA10_0 = input . LA ( 1 ) ; if ( ( LA10_0 == '$' || ( LA10_0 >= '0' && LA10_0 <= '9' ) || ( LA10_0 >= 'A' && LA10_0 <= 'Z' ) || LA10_0 == '_' || ( LA10_0 >= 'a' && LA10_0 <= 'z' ) ) ) { alt10 = 1 ; } switch ( alt10 ) { case 1 : // InternalXbase . g : { if ( input . LA ( 1 ) == '$' || ( input . LA ( 1 ) >= '0' && input . LA ( 1 ) <= '9' ) || ( input . LA ( 1 ) >= 'A' && input . LA ( 1 ) <= 'Z' ) || input . LA ( 1 ) == '_' || ( input . LA ( 1 ) >= 'a' && input . LA ( 1 ) <= 'z' ) ) { input . consume ( ) ; } else { MismatchedSetException mse = new MismatchedSetException ( null , input ) ; recover ( mse ) ; throw mse ; } } break ; default : break loop10 ; } } while ( true ) ; } state . type = _type ; state . channel = _channel ; } finally { }
public class FeatureScopes { /** * This method serves as an entry point for the content assist scoping for features . * @ param featureCall the context provides access to the resource set . If it is an assignment , it * will be used to restrict scoping . * @ param receiver the receiver of the feature call . * @ param resolvedTypes TODO * @ param session TODO */ public IScope createFeatureCallScopeForReceiver ( final XExpression featureCall , final XExpression receiver , IFeatureScopeSession session , IResolvedTypes resolvedTypes ) { } }
// Builds the feature scope for a feature call with an explicit receiver, layering scopes from
// outermost to innermost: type-literal scope (for static access like Type.member), static
// members on the receiver, static extensions, dynamic (instance) extensions, and finally the
// receiver type's instance members. Falls back to a type-literal-prefix scope or a follow-up
// error scope when the receiver type is unknown or invalid.
// NOTE(review): the invalid-receiver branch deliberately restricts the scope to plain instance
// members (NULLSCOPE as parent) — extensions/statics are excluded there.
if ( receiver == null || receiver . eIsProxy ( ) ) return IScope . NULLSCOPE ; LightweightTypeReference receiverType = resolvedTypes . getActualType ( receiver ) ; if ( receiverType != null && ! isUnknownReceiverType ( receiverType ) ) { JvmIdentifiableElement linkedReceiver = resolvedTypes . getLinkedFeature ( asAbstractFeatureCall ( receiver ) ) ; boolean typeLiteral = false ; IScope root = createTypeLiteralScope ( featureCall , receiver , session , resolvedTypes , receiverType , linkedReceiver ) ; if ( root != null ) { if ( featureCall instanceof XMemberFeatureCall && ( ( XMemberFeatureCall ) featureCall ) . isExplicitStatic ( ) ) { return root ; } typeLiteral = true ; } else { root = IScope . NULLSCOPE ; } // check if ' super ' was used as receiver which renders extension features and static features invalid if ( typeLiteral || isValidFeatureCallArgument ( receiver , linkedReceiver , session ) ) { // static members that are invoked on a receiver , e . g . myString . CASE _ INSENSITIVE _ ORDER IScope staticScope = createStaticScope ( asAbstractFeatureCall ( featureCall ) , receiver , receiverType , root , session , resolvedTypes ) ; // static extensions , if any , e . g . iterable . map [ ] , or things that have been imported by means of import static extension MyType IScope staticExtensionScope = createStaticExtensionsScope ( receiver , receiverType , featureCall , staticScope , session , resolvedTypes ) ; // instance extensions , e . g . extension ReflectionUtils with myObject . get ( ' privateField ' ) IScope extensionScope = createDynamicExtensionsScope ( receiver , receiverType , featureCall , staticExtensionScope , session , resolvedTypes ) ; // instance members , e . g . this .
toString return createFeatureScopeForTypeRef ( receiver , receiverType , false , featureCall , session , linkedReceiver , extensionScope , true ) ; } else { // put only instance members into the scope return createFeatureScopeForTypeRef ( receiver , receiverType , false , featureCall , session , linkedReceiver , IScope . NULLSCOPE , true ) ; } } else if ( typeLiteralHelper . isPotentialTypeLiteral ( featureCall , resolvedTypes ) ) { IScope errorScope = createFollowUpErrorScope ( receiverType ) ; List < String > prefix = typeLiteralHelper . getTypeNameSegmentsFromConcreteSyntax ( ( XMemberFeatureCall ) featureCall ) ; if ( prefix == null ) { return errorScope ; } return createTypeLiteralScope ( featureCall , QualifiedName . create ( prefix ) , errorScope , session , resolvedTypes ) ; } else { return createFollowUpErrorScope ( receiverType ) ; }
public class ArtistActivity { /** * Used to position the track list at the bottom of the screen . */ private void setTrackListPadding ( ) { } }
// Defers the padding calculation to a one-shot OnPreDrawListener so that the recycler view's
// measured height is available. Inside the callback: top-pads the list so only the playback
// header height remains visible at the bottom, attaches the adapter, wires the horizontal
// swipe-to-dismiss gesture, and translates the playback view off-screen when the current
// playlist is empty. Returning true lets the pending draw pass proceed.
mPlaylistRecyclerView . getViewTreeObserver ( ) . addOnPreDrawListener ( new ViewTreeObserver . OnPreDrawListener ( ) { @ Override public boolean onPreDraw ( ) { mPlaylistRecyclerView . getViewTreeObserver ( ) . removeOnPreDrawListener ( this ) ; int headerListHeight = getResources ( ) . getDimensionPixelOffset ( R . dimen . playback_view_height ) ; mPlaylistRecyclerView . setPadding ( 0 , mPlaylistRecyclerView . getHeight ( ) - headerListHeight , 0 , 0 ) ; mPlaylistRecyclerView . setAdapter ( mPlaylistAdapter ) ; // attach the dismiss gesture . new SwipeToDismissGesture . Builder ( SwipeToDismissDirection . HORIZONTAL ) . on ( mPlaylistRecyclerView ) . apply ( new DismissStrategy ( ) ) . backgroundColor ( getResources ( ) . getColor ( R . color . grey ) ) . build ( ) ; // hide if current play playlist is empty . if ( mPlaylistTracks . isEmpty ( ) ) { mPlaybackView . setTranslationY ( headerListHeight ) ; } return true ; } } ) ;
public class CommerceDiscountUtil { /** * Returns the last commerce discount in the ordered set where uuid = & # 63 ; . * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce discount , or < code > null < / code > if a matching commerce discount could not be found */ public static CommerceDiscount fetchByUuid_Last ( String uuid , OrderByComparator < CommerceDiscount > orderByComparator ) { } }
// Thin static facade: delegates straight to the persistence bean resolved via getPersistence().
return getPersistence ( ) . fetchByUuid_Last ( uuid , orderByComparator ) ;
public class AmazonDirectConnectClient { /** * Accepts a proposal request to attach a virtual private gateway to a Direct Connect gateway . * @ param acceptDirectConnectGatewayAssociationProposalRequest * @ return Result of the AcceptDirectConnectGatewayAssociationProposal operation returned by the service . * @ throws DirectConnectServerException * A server - side error occurred . * @ throws DirectConnectClientException * One or more parameters are not valid . * @ sample AmazonDirectConnect . AcceptDirectConnectGatewayAssociationProposal * @ see < a * href = " http : / / docs . aws . amazon . com / goto / WebAPI / directconnect - 2012-10-25 / AcceptDirectConnectGatewayAssociationProposal " * target = " _ top " > AWS API Documentation < / a > */ @ Override public AcceptDirectConnectGatewayAssociationProposalResult acceptDirectConnectGatewayAssociationProposal ( AcceptDirectConnectGatewayAssociationProposalRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAcceptDirectConnectGatewayAssociationProposal ( request ) ;
public class SubnetworkClient { /** * Retrieves an aggregated list of subnetworks . * < p > Sample code : * < pre > < code > * try ( SubnetworkClient subnetworkClient = SubnetworkClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( SubnetworksScopedList element : subnetworkClient . aggregatedListSubnetworks ( project ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final AggregatedListSubnetworksPagedResponse aggregatedListSubnetworks ( ProjectName project ) { } }
AggregatedListSubnetworksHttpRequest request = AggregatedListSubnetworksHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return aggregatedListSubnetworks ( request ) ;
public class JvmConstraintOwnerImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
// EMF-generated feature check: the CONSTRAINTS feature is "set" when its list is non-null
// and non-empty; all other feature ids are delegated to the superclass. Do not hand-edit
// (@generated) — regenerate from the model instead.
switch ( featureID ) { case TypesPackage . JVM_CONSTRAINT_OWNER__CONSTRAINTS : return constraints != null && ! constraints . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
public class ImageSet { /** * Get the list of non - file image managers . */ List < ImageManager > getNonFileImageManagers ( ) { } }
List < ImageManager > nonFile = new ArrayList < ImageManager > ( ) ; for ( ImageManager im : imageManagers ) { if ( ! ( im instanceof FileImageManager ) ) { nonFile . add ( im ) ; } } return nonFile ;
public class DistanceResolver { /** * Calculates the distance between two points * @ return distance between two points */ public static float distance ( final float ax , final float ay , final float bx , final float by ) { } }
return ( float ) Math . sqrt ( ( ax - bx ) * ( ax - bx ) + ( ay - by ) * ( ay - by ) ) ;
public class CommerceWishListLocalServiceBaseImpl { /** * Performs a dynamic query on the database and returns the matching rows . * @ param dynamicQuery the dynamic query * @ return the matching rows */ @ Override public < T > List < T > dynamicQuery ( DynamicQuery dynamicQuery ) { } }
return commerceWishListPersistence . findWithDynamicQuery ( dynamicQuery ) ;
public class Branch { /** * < p > A void call to indicate that the user has performed a specific action and for that to be * reported to the Branch API , with additional app - defined meta data to go along with that action . < / p > * @ param action A { @ link String } value to be passed as an action that the user has carried * out . For example " registered " or " logged in " . * @ param metadata A { @ link JSONObject } containing app - defined meta - data to be attached to a * user action that has just been completed . * @ param callback instance of { @ link BranchViewHandler . IBranchViewEvents } to listen Branch view events */ public void userCompletedAction ( @ NonNull final String action , JSONObject metadata , BranchViewHandler . IBranchViewEvents callback ) { } }
ServerRequest req = new ServerRequestActionCompleted ( context_ , action , metadata , callback ) ; if ( ! req . constructError_ && ! req . handleErrors ( context_ ) ) { handleNewRequest ( req ) ; }
public class S3ClientCache { /** * Returns a client for the requested region , or throws an exception when * unable . * @ param region * The region the returned { @ link AmazonS3 } will be * configured to use . * @ return A client for the given region from the cache , either instantiated * automatically from the provided { @ link AWSCredentials } or * provided with { @ link # useClient ( AmazonS3 ) } . * @ throws IllegalArgumentException * When a region is requested that has not been provided to the * cache with { @ link # useClient ( AmazonS3 ) } , and the cache * has no { @ link AWSCredentials } with which a client may be * instantiated . */ public AmazonS3 getClient ( Region region ) { } }
if ( region == null ) { throw new IllegalArgumentException ( "S3 region must be specified" ) ; } return getClient ( region . toAWSRegion ( ) . getName ( ) ) ;
public class JCuda { /** * Allocate a mipmapped array on the device . * < pre > * cudaError _ t cudaMallocMipmappedArray ( * cudaMipmappedArray _ t * mipmappedArray , * const cudaChannelFormatDesc * desc , * cudaExtent extent , * unsigned int numLevels , * unsigned int flags = 0 ) * < / pre > * < div > * < p > Allocate a mipmapped array on the device . * Allocates a CUDA mipmapped array according to the cudaChannelFormatDesc * structure < tt > desc < / tt > and returns a handle to the new CUDA mipmapped * array in < tt > * mipmappedArray < / tt > . < tt > numLevels < / tt > specifies the * number of mipmap levels to be allocated . This value is clamped to the * range [ 1 , 1 + floor ( log2 ( max ( width , height , * depth ) ) ) ] . * < p > The cudaChannelFormatDesc is defined * as : * < pre > struct cudaChannelFormatDesc { * int x , y , z , w ; * enum cudaChannelFormatKind * } ; < / pre > * where cudaChannelFormatKind is one of * cudaChannelFormatKindSigned , cudaChannelFormatKindUnsigned , or * cudaChannelFormatKindFloat . * < p > cudaMallocMipmappedArray ( ) can allocate * the following : * < ul > * < li > * < p > A 1D mipmapped array is * allocated if the height and depth extents are both zero . * < / li > * < li > * < p > A 2D mipmapped array is * allocated if only the depth extent is zero . * < / li > * < li > * < p > A 3D mipmapped array is * allocated if all three extents are non - zero . * < / li > * < li > * < p > A 1D layered CUDA mipmapped * array is allocated if only the height extent is zero and the * cudaArrayLayered flag is set . Each * layer is a 1D mipmapped array . * The number of layers is determined by the depth extent . * < / li > * < li > * < p > A 2D layered CUDA mipmapped * array is allocated if all three extents are non - zero and the * cudaArrayLayered flag is set . Each * layer is a 2D mipmapped array . * The number of layers is determined by the depth extent . 
* < / li > * < li > * < p > A cubemap CUDA mipmapped array * is allocated if all three extents are non - zero and the cudaArrayCubemap * flag is set . Width * must be equal to height , and * depth must be six . The order of the six layers in memory is the same * as that listed in cudaGraphicsCubeFace . * < / li > * < li > * < p > A cubemap layered CUDA mipmapped * array is allocated if all three extents are non - zero , and both , * cudaArrayCubemap and cudaArrayLayered * flags are set . Width must be * equal to height , and depth must be a multiple of six . A cubemap layered * CUDA mipmapped array * is a special type of 2D layered * CUDA mipmapped array that consists of a collection of cubemap mipmapped * arrays . The first * six layers represent the first * cubemap mipmapped array , the next six layers form the second cubemap * mipmapped array , and so * on . * < / li > * < / ul > * < p > The < tt > flags < / tt > parameter enables * different options to be specified that affect the allocation , as * follows . * < ul > * < li > * < p > cudaArrayDefault : This flag ' s * value is defined to be 0 and provides default mipmapped array * allocation * < / li > * < li > * < p > cudaArrayLayered : Allocates a * layered CUDA mipmapped array , with the depth extent indicating the * number of layers * < / li > * < li > * < p > cudaArrayCubemap : Allocates a * cubemap CUDA mipmapped array . Width must be equal to height , and depth * must be six . If the cudaArrayLayered * flag is also set , depth must be * a multiple of six . * < / li > * < li > * < p > cudaArraySurfaceLoadStore : This * flag indicates that individual mipmap levels of the CUDA mipmapped * array will be read from or written to using a surface * reference . * < / li > * < li > * < p > cudaArrayTextureGather : This * flag indicates that texture gather operations will be performed on the * CUDA array . 
Texture gather can only be performed * on 2D CUDA mipmapped arrays , * and the gather operations are performed only on the most detailed * mipmap level . * < / li > * < / ul > * < p > The width , height and depth extents must * meet certain size requirements as listed in the following table . All * values are specified * in elements . * < div > * < table cellpadding = " 4 " cellspacing = " 0 " summary = " " frame = " border " border = " 1 " rules = " all " > * < tbody > * < tr > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > < strong > CUDA array * type < / strong > * < / td > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > < strong > Valid * extents * { ( width range in * elements ) , ( height range ) , ( depth range ) } < / strong > * < / td > * < / tr > * < tr > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > 1D < / p > * < / td > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > { ( 1 , maxTexture1DMipmap ) , * 0 , 0 } * < / td > * < / tr > * < tr > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > 2D < / p > * < / td > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * ( 1 , maxTexture2DMipmap [ 0 ] ) , ( 1 , maxTexture2DMipmap [ 1 ] ) , 0 } * < / td > * < / tr > * < tr > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > 3D < / p > * < / td > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > { ( 1 , maxTexture3D [ 0 ] ) , * ( 1 , maxTexture3D [ 1 ] ) , ( 1 , maxTexture3D [ 2 ] ) } * < / td > * < / tr > * < tr > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > 1D Layered < / p > * < / td > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * ( 1 , maxTexture1DLayered [ 0 ] ) , 0 , ( 1 , maxTexture1DLayered [ 1 ] ) } * < / td > * < / tr > * < tr > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > 2D Layered < / p > * < / td > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * ( 1 , 
maxTexture2DLayered [ 0 ] ) , ( 1 , maxTexture2DLayered [ 1 ] ) , * ( 1 , maxTexture2DLayered [ 2 ] ) } * < / td > * < / tr > * < tr > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > Cubemap < / p > * < / td > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > { ( 1 , maxTextureCubemap ) , * ( 1 , maxTextureCubemap ) , 6 } * < / td > * < / tr > * < tr > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * < p > Cubemap Layered < / p > * < / td > * < td valign = " top " rowspan = " 1 " colspan = " 1 " > * ( 1 , maxTextureCubemapLayered [ 0 ] ) , ( 1 , maxTextureCubemapLayered [ 0 ] ) , * ( 1 , maxTextureCubemapLayered [ 1 ] ) } * < / td > * < / tr > * < / tbody > * < / table > * < / div > * < div > * < span > Note : < / span > * < p > Note that this * function may also return error codes from previous , asynchronous * launches . * < / div > * < / div > * @ param mipmappedArray Pointer to allocated mipmapped array in device memory * @ param desc Requested channel format * @ param extent Requested allocation size ( width field in elements ) * @ param numLevels Number of mipmap levels to allocate * @ param flags Flags for extensions * @ return cudaSuccess , cudaErrorMemoryAllocation * @ see JCuda # cudaMalloc3D * @ see JCuda # cudaMalloc * @ see JCuda # cudaMallocPitch * @ see JCuda # cudaFree * @ see JCuda # cudaFreeArray * @ see JCuda # cudaMallocHost * @ see JCuda # cudaFreeHost * @ see JCuda # cudaHostAlloc * @ see cudaExtent */ public static int cudaMallocMipmappedArray ( cudaMipmappedArray mipmappedArray , cudaChannelFormatDesc desc , cudaExtent extent , int numLevels , int flags ) { } }
return checkResult ( cudaMallocMipmappedArrayNative ( mipmappedArray , desc , extent , numLevels , flags ) ) ;
public class DirectionUtil { /** * Returns which of the sixteen compass directions is associated with the specified angle * theta . < em > Note : < / em > that the angle supplied is assumed to increase clockwise around the * origin ( which screen angles do ) rather than counter - clockwise around the origin ( which * cartesian angles do ) and < code > NORTH < / code > is considered to point toward the top of the * screen . */ public static int getFineDirection ( double theta ) { } }
theta = ( ( theta + Math . PI ) * 8 ) / Math . PI ; return ANGLE_MAP [ ( int ) Math . round ( theta ) % FINE_DIRECTION_COUNT ] ;
public class JsonObject { /** * Returns the value mapped by { @ code name } if it exists , coercing it if * necessary , or throws if no such mapping exists . * @ throws JsonException if no such mapping exists . */ public String getString ( String name , boolean strict ) throws JsonException { } }
JsonElement el = get ( name ) ; String res = null ; if ( strict && ! el . isString ( ) ) { throw Util . typeMismatch ( name , el , "string" , true ) ; } res = el . toString ( ) ; if ( res == null ) throw Util . typeMismatch ( name , el , "string" , strict ) ; return res ;
public class CheckHttp { /** * Apply logic to the http response and build metrics . * @ param opt * @ param response * @ param elapsed * @ return - The metrics * @ throws MetricGatheringException * List < Metric > */ private List < Metric > analyzeResponse ( final ICommandLine opt , final String response , final int elapsed ) throws MetricGatheringException { } }
List < Metric > metrics = new ArrayList < Metric > ( ) ; metrics . add ( new Metric ( "time" , "" , new BigDecimal ( elapsed ) , null , null ) ) ; if ( ! opt . hasOption ( "certificate" ) ) { if ( opt . hasOption ( "string" ) ) { boolean found = false ; String string = opt . getOptionValue ( "string" ) ; found = response . contains ( string ) ; metrics . add ( new Metric ( "string" , "" , new BigDecimal ( Utils . getIntValue ( found ) ) , null , null ) ) ; } if ( opt . hasOption ( "expect" ) ) { int count = 0 ; String [ ] values = opt . getOptionValue ( "expect" ) . split ( "," ) ; for ( String value : values ) { if ( response . contains ( value ) ) { count ++ ; } } metrics . add ( new Metric ( "expect" , String . valueOf ( count ) + " times. " , new BigDecimal ( count ) , null , null ) ) ; } if ( opt . hasOption ( "regex" ) ) { String regex = opt . getOptionValue ( "regex" ) ; Pattern p = null ; int flags = 0 ; if ( opt . hasOption ( "eregi" ) ) { flags = Pattern . CASE_INSENSITIVE ; } if ( opt . hasOption ( "linespan" ) ) { flags = flags | Pattern . MULTILINE ; } p = Pattern . compile ( regex , flags ) ; boolean found = p . matcher ( response ) . find ( ) ; if ( opt . hasOption ( "invert-regex" ) ) { metrics . add ( new Metric ( "invert-regex" , String . valueOf ( found ) , new BigDecimal ( Utils . getIntValue ( found ) ) , null , null ) ) ; } else { metrics . add ( new Metric ( "regex" , String . valueOf ( found ) , new BigDecimal ( Utils . getIntValue ( found ) ) , null , null ) ) ; } } } return metrics ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcDerivedUnitEnum createIfcDerivedUnitEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcDerivedUnitEnum result = IfcDerivedUnitEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class ResourceReaderImpl { /** * / * ( non - Javadoc ) * @ see net . crowmagnumb . util . ResourceReader # getColor ( java . lang . String , java . awt . Color ) */ @ Override public Color getColor ( final String key , final Color defaultValue ) { } }
return formatColor ( key , getFormattedPropValue ( key ) , defaultValue ) ;
public class ApiOvhDomain { /** * Create a new redirection ( Don ' t forget to refresh the zone ) * REST : POST / domain / zone / { zoneName } / redirection * @ param keywords [ required ] Keywords for invisible redirection * @ param title [ required ] Title for invisible redirection * @ param type [ required ] Redirection type * @ param target [ required ] Target of the redirection * @ param description [ required ] Desciption for invisible redirection * @ param subDomain [ required ] subdomain to redirect * @ param zoneName [ required ] The internal name of your zone */ public OvhRedirection zone_zoneName_redirection_POST ( String zoneName , String description , String keywords , String subDomain , String target , String title , OvhRedirectionTypeEnum type ) throws IOException { } }
String qPath = "/domain/zone/{zoneName}/redirection" ; StringBuilder sb = path ( qPath , zoneName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "description" , description ) ; addBody ( o , "keywords" , keywords ) ; addBody ( o , "subDomain" , subDomain ) ; addBody ( o , "target" , target ) ; addBody ( o , "title" , title ) ; addBody ( o , "type" , type ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhRedirection . class ) ;
public class ReflectionUtils { /** * Checks if the Class given is a primitive type . This includes the Java primitive types and their wrapper types . * @ param type the Class to examine * @ return true if the Class ' s type is considered a primitive type */ public static boolean isPrimitiveLike ( final Class type ) { } }
return type != null && ( type == String . class || type == char . class || type == Character . class || type == short . class || type == Short . class || type == Integer . class || type == int . class || type == Long . class || type == long . class || type == Double . class || type == double . class || type == float . class || type == Float . class || type == Boolean . class || type == boolean . class || type == Byte . class || type == byte . class || type == Date . class || type == Locale . class || type == Class . class || type == UUID . class || type == URI . class || type . isEnum ( ) ) ;
public class Gamma { /** * Returns the regularized gamma function P ( a , x ) . * The implementation of this method is based on : * < ul > * < li > * < a href = " http : / / mathworld . wolfram . com / RegularizedGammaFunction . html " > * Regularized Gamma Function < / a > , equation ( 1) * < / li > * < li > * < a href = " http : / / mathworld . wolfram . com / IncompleteGammaFunction . html " > * Incomplete Gamma Function < / a > , equation ( 4 ) . * < / li > * < li > * < a href = " http : / / mathworld . wolfram . com / ConfluentHypergeometricFunctionoftheFirstKind . html " > * Confluent Hypergeometric Function of the First Kind < / a > , equation ( 1 ) . * < / li > * < / ul > * @ param a the a parameter . * @ param x the value . * @ param epsilon When the absolute value of the nth item in the * series is less than epsilon the approximation ceases to calculate * further elements in the series . * @ param maxIterations Maximum number of " iterations " to complete . * @ return the regularized gamma function P ( a , x ) * @ throws IllegalStateException if the algorithm fails to converge . */ public static double regularizedGammaP ( double a , double x , double epsilon , int maxIterations ) { } }
double ret ; if ( Double . isNaN ( a ) || Double . isNaN ( x ) || ( a <= 0.0 ) || ( x < 0.0 ) ) { ret = Double . NaN ; } else if ( x == 0.0 ) { ret = 0.0 ; } else if ( x >= a + 1 ) { // use regularizedGammaQ because it should converge faster in this // case . ret = 1.0 - regularizedGammaQ ( a , x , epsilon , maxIterations ) ; } else { // calculate series double n = 0.0 ; // current element index double an = 1.0 / a ; // n - th element in the series double sum = an ; // partial sum while ( Math . abs ( an / sum ) > epsilon && n < maxIterations && sum < Double . POSITIVE_INFINITY ) { // compute next element in the series n += 1.0 ; an *= x / ( a + n ) ; // update partial sum sum += an ; } if ( n >= maxIterations ) { throw new IllegalStateException ( "max count exceeded: " + maxIterations ) ; } else if ( Double . isInfinite ( sum ) ) { ret = 1.0 ; } else { ret = Math . exp ( - x + ( a * Math . log ( x ) ) - logGamma ( a ) ) * sum ; } } return ret ;
public class Asn1Utils { /** * Encode an ASN . 1 IA5String . * @ param value * the value to be encoded * @ param buf * the buffer with space to the left of current position where the value will be encoded * @ return the length of the encoded data */ public static int encodeIA5String ( String value , ByteBuffer buf ) { } }
int pos = buf . position ( ) ; byte [ ] data = ( value == null ) ? new byte [ 0 ] : value . getBytes ( ) ; for ( int i = data . length - 1 ; i >= 0 ; i -- ) { pos -- ; buf . put ( pos , data [ i ] ) ; } buf . position ( buf . position ( ) - data . length ) ; int headerLength = DerUtils . encodeIdAndLength ( DerId . TagClass . UNIVERSAL , DerId . EncodingType . PRIMITIVE , ASN1_IA5STRING_TAG_NUM , data . length , buf ) ; return headerLength + data . length ;
public class Resolve { /** * where */ Type mostSpecificReturnType ( Type mt1 , Type mt2 ) { } }
Type rt1 = mt1 . getReturnType ( ) ; Type rt2 = mt2 . getReturnType ( ) ; if ( mt1 . hasTag ( FORALL ) && mt2 . hasTag ( FORALL ) ) { // if both are generic methods , adjust return type ahead of subtyping check rt1 = types . subst ( rt1 , mt1 . getTypeArguments ( ) , mt2 . getTypeArguments ( ) ) ; } // first use subtyping , then return type substitutability if ( types . isSubtype ( rt1 , rt2 ) ) { return mt1 ; } else if ( types . isSubtype ( rt2 , rt1 ) ) { return mt2 ; } else if ( types . returnTypeSubstitutable ( mt1 , mt2 ) ) { return mt1 ; } else if ( types . returnTypeSubstitutable ( mt2 , mt1 ) ) { return mt2 ; } else { return null ; }
public class StringUtils { /** * 连接字符串数组 * @ param strings 字符串数组 * @ param separator 分隔符 * @ return 按分隔符分隔的字符串 */ public static String join ( String [ ] strings , String separator ) { } }
if ( strings == null || strings . length == 0 ) { return EMPTY ; } StringBuilder sb = new StringBuilder ( ) ; for ( String string : strings ) { if ( isNotBlank ( string ) ) { sb . append ( string ) . append ( separator ) ; } } return sb . length ( ) > 0 ? sb . substring ( 0 , sb . length ( ) - separator . length ( ) ) : StringUtils . EMPTY ;
public class ExcelUtils { /** * 无模板 、 基于注解的数据导出 * @ param data 待导出数据 * @ param clazz { @ link com . github . crab2died . annotation . ExcelField } 映射对象Class * @ param isWriteHeader 是否写入表头 * @ param sheetName 指定导出Excel的sheet名称 * @ param isXSSF 导出的Excel是否为Excel2007及以上版本 ( 默认是 ) * @ param os 生成的Excel待输出数据流 * @ throws Excel4JException 异常 * @ throws IOException 异常 * @ author Crab2Died */ public void exportObjects2Excel ( List < ? > data , Class clazz , boolean isWriteHeader , String sheetName , boolean isXSSF , OutputStream os ) throws Excel4JException , IOException { } }
try ( Workbook workbook = exportExcelNoTemplateHandler ( data , clazz , isWriteHeader , sheetName , isXSSF ) ) { workbook . write ( os ) ; }
public class FileUtil { /** * 搜索jar里面的class * 注意jar的open和close * 返回类名和类的map集合 * @ throws IOException */ public static final Map < String , JarEntry > findClassByJar ( JarFile jarFile ) throws IOException { } }
Map < String , JarEntry > map = new HashMap < String , JarEntry > ( ) ; if ( jarFile == null ) { throw new RuntimeException ( "jarFile is Null" ) ; } String Suffix = ".class" ; Enumeration < JarEntry > jarEntries = jarFile . entries ( ) ; while ( jarEntries . hasMoreElements ( ) ) { // 遍历jar的实体对象 JarEntry jarEntry = jarEntries . nextElement ( ) ; if ( jarEntry . isDirectory ( ) || ! jarEntry . getName ( ) . endsWith ( Suffix ) ) { continue ; } String jarEntryName = jarEntry . getName ( ) ; // 类似 : sun / security / internal / interfaces / TlsMasterSecret . class String className = jarEntryName . substring ( 0 , jarEntryName . length ( ) - Suffix . length ( ) ) ; // sun / security / internal / interfaces / TlsMasterSecret // 注意 , jar文件里面的只能是 / 不能是 \ className = className . replace ( "/" , "." ) ; // sun . security . internal . interfaces . TlsMasterSecret map . put ( className , jarEntry ) ; } return map ;
public class CleverTapAPI { /** * Pushes everything available in the JSON object returned by the Facebook GraphRequest * @ param graphUser The object returned from Facebook */ @ SuppressWarnings ( { } }
"unused" , "WeakerAccess" } ) public void pushFacebookUser ( final JSONObject graphUser ) { postAsyncSafely ( "pushFacebookUser" , new Runnable ( ) { @ Override public void run ( ) { _pushFacebookUser ( graphUser ) ; } } ) ;
public class AWSCognitoIdentityProviderClient { /** * Deletes the user attributes in a user pool as an administrator . Works on any user . * Requires developer credentials . * @ param adminDeleteUserAttributesRequest * Represents the request to delete user attributes as an administrator . * @ return Result of the AdminDeleteUserAttributes operation returned by the service . * @ throws ResourceNotFoundException * This exception is thrown when the Amazon Cognito service cannot find the requested resource . * @ throws InvalidParameterException * This exception is thrown when the Amazon Cognito service encounters an invalid parameter . * @ throws TooManyRequestsException * This exception is thrown when the user has made too many requests for a given operation . * @ throws NotAuthorizedException * This exception is thrown when a user is not authorized . * @ throws UserNotFoundException * This exception is thrown when a user is not found . * @ throws InternalErrorException * This exception is thrown when Amazon Cognito encounters an internal error . * @ sample AWSCognitoIdentityProvider . AdminDeleteUserAttributes * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / AdminDeleteUserAttributes " * target = " _ top " > AWS API Documentation < / a > */ @ Override public AdminDeleteUserAttributesResult adminDeleteUserAttributes ( AdminDeleteUserAttributesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAdminDeleteUserAttributes ( request ) ;
public class MessageStoreImpl { /** * Notification that the configuration of the bus has changed . * @ param newBus The new bus configuration * @ param busChg true if the bus has changed * @ param destChg true if a destination has changed * @ param medChg true if a mediation has changed */ @ Override public void busReloaded ( Object newBus , boolean busChanged , boolean destChg , boolean medChg ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( this , tc , "busReloaded" ) ; SibTr . exit ( this , tc , "busReloaded" ) ; }
public class SamzaConfigFactory { /** * Generate a list of map ( of config properties ) for all PIs and EPI in * the input topology */ public List < Map < String , String > > getMapsForTopology ( SamzaTopology topology ) throws Exception { } }
List < Map < String , String > > maps = new ArrayList < Map < String , String > > ( ) ; // File to write serialized objects String filename = topology . getTopologyName ( ) + ".dat" ; Path dirPath = FileSystems . getDefault ( ) . getPath ( "dat" ) ; Path filePath = FileSystems . getDefault ( ) . getPath ( dirPath . toString ( ) , filename ) ; String dstPath = filePath . toString ( ) ; String resPath ; String filesystem ; if ( this . isLocalMode ) { filesystem = SystemsUtils . LOCAL_FS ; File dir = dirPath . toFile ( ) ; if ( ! dir . exists ( ) ) FileUtils . forceMkdir ( dir ) ; } else { filesystem = SystemsUtils . HDFS ; } // Correct system name for streams this . setSystemNameForStreams ( topology . getStreams ( ) ) ; // Add all PIs to a collection ( map ) Map < String , Object > piMap = new HashMap < String , Object > ( ) ; Set < EntranceProcessingItem > entranceProcessingItems = topology . getEntranceProcessingItems ( ) ; Set < IProcessingItem > processingItems = topology . getNonEntranceProcessingItems ( ) ; for ( EntranceProcessingItem epi : entranceProcessingItems ) { SamzaEntranceProcessingItem sepi = ( SamzaEntranceProcessingItem ) epi ; piMap . put ( sepi . getName ( ) , sepi ) ; } for ( IProcessingItem pi : processingItems ) { SamzaProcessingItem spi = ( SamzaProcessingItem ) pi ; piMap . put ( spi . getName ( ) , spi ) ; } // Serialize all PIs boolean serialized = false ; if ( this . isLocalMode ) { serialized = SystemsUtils . serializeObjectToLocalFileSystem ( piMap , dstPath ) ; resPath = dstPath ; } else { resPath = SystemsUtils . serializeObjectToHDFS ( piMap , dstPath ) ; serialized = resPath != null ; } if ( ! serialized ) { throw new Exception ( "Fail serialize map of PIs to file" ) ; } // MapConfig for all PIs for ( EntranceProcessingItem epi : entranceProcessingItems ) { SamzaEntranceProcessingItem sepi = ( SamzaEntranceProcessingItem ) epi ; maps . add ( this . 
getMapForEntrancePI ( sepi , resPath , filesystem ) ) ; } for ( IProcessingItem pi : processingItems ) { SamzaProcessingItem spi = ( SamzaProcessingItem ) pi ; maps . add ( this . getMapForPI ( spi , resPath , filesystem ) ) ; } return maps ;
public class InternalService { /** * Returns observable to add a participant to . * @ param conversationId ID of a conversation to add a participant to . * @ param callback Callback to deliver new session instance . */ public void getParticipants ( @ NonNull final String conversationId , @ Nullable Callback < ComapiResult < List < Participant > > > callback ) { } }
adapter . adapt ( getParticipants ( conversationId ) , callback ) ;
public class JavaParser {

    /**
     * Parses an assignment operator.
     *
     * src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1122:1:
     * assignmentOperator : ( '=' | '+=' | '-=' | '*=' | '/=' | '&=' | '|=' |
     * '^=' | '%=' | '<' '<' '=' | '>' '>' '=' | '>' '>' '>' '=' ) ;
     *
     * NOTE(review): this looks like ANTLR-generated parser code (memoization,
     * synpred, DFA-style dispatch) — do not hand-edit; regenerate from Java.g.
     */
    public final void assignmentOperator() throws RecognitionException {
        int assignmentOperator_StartIndex = input.index();
        try {
            // When backtracking, skip the rule if it was already parsed at this position.
            if (state.backtracking > 0 && alreadyParsedRule(input, 109)) {
                return;
            }
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1123:5:
            // ( '=' | '+=' | '-=' | '*=' | '/=' | '&=' | '|=' | '^=' | '%=' |
            //   '<' '<' '=' | '>' '>' '=' | '>' '>' '>' '=' )
            // Select the alternative from the current lookahead token.
            int alt141 = 12;
            switch (input.LA(1)) {
                case 54: { alt141 = 1; } break;   // '='
                case 42: { alt141 = 2; } break;   // '+='
                case 46: { alt141 = 3; } break;   // '-='
                case 39: { alt141 = 4; } break;   // '*='
                case 50: { alt141 = 5; } break;   // '/='
                case 35: { alt141 = 6; } break;   // '&='
                case 123: { alt141 = 7; } break;  // '|='
                case 62: { alt141 = 8; } break;   // '^='
                case 32: { alt141 = 9; } break;   // '%='
                case 53: { alt141 = 10; } break;  // '<' (shift-left assign)
                case 56: {
                    // '>' — disambiguate '>>=' vs '>>>=' with deeper lookahead
                    // and a syntactic predicate.
                    int LA141_11 = input.LA(2);
                    if ((LA141_11 == 56)) {
                        int LA141_12 = input.LA(3);
                        if ((synpred212_Java())) {
                            alt141 = 11;
                        } else if ((true)) {
                            alt141 = 12;
                        }
                    } else {
                        if (state.backtracking > 0) {
                            state.failed = true;
                            return;
                        }
                        int nvaeMark = input.mark();
                        try {
                            input.consume();
                            NoViableAltException nvae = new NoViableAltException("", 141, 11, input);
                            throw nvae;
                        } finally {
                            input.rewind(nvaeMark);
                        }
                    }
                } break;
                default:
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return;
                    }
                    NoViableAltException nvae = new NoViableAltException("", 141, 0, input);
                    throw nvae;
            }
            // Consume the tokens for the chosen alternative.
            switch (alt141) {
                case 1: // Java.g:1123:7 : '='
                    { match(input, 54, FOLLOW_54_in_assignmentOperator4899); if (state.failed) return; }
                    break;
                case 2: // Java.g:1124:9 : '+='
                    { match(input, 42, FOLLOW_42_in_assignmentOperator4909); if (state.failed) return; }
                    break;
                case 3: // Java.g:1125:9 : '-='
                    { match(input, 46, FOLLOW_46_in_assignmentOperator4919); if (state.failed) return; }
                    break;
                case 4: // Java.g:1126:9 : '*='
                    { match(input, 39, FOLLOW_39_in_assignmentOperator4929); if (state.failed) return; }
                    break;
                case 5: // Java.g:1127:9 : '/='
                    { match(input, 50, FOLLOW_50_in_assignmentOperator4939); if (state.failed) return; }
                    break;
                case 6: // Java.g:1128:9 : '&='
                    { match(input, 35, FOLLOW_35_in_assignmentOperator4949); if (state.failed) return; }
                    break;
                case 7: // Java.g:1129:9 : '|='
                    { match(input, 123, FOLLOW_123_in_assignmentOperator4959); if (state.failed) return; }
                    break;
                case 8: // Java.g:1130:9 : '^='
                    { match(input, 62, FOLLOW_62_in_assignmentOperator4969); if (state.failed) return; }
                    break;
                case 9: // Java.g:1131:9 : '%='
                    { match(input, 32, FOLLOW_32_in_assignmentOperator4979); if (state.failed) return; }
                    break;
                case 10: // Java.g:1132:9 : '<' '<' '='
                    {
                        match(input, 53, FOLLOW_53_in_assignmentOperator4989); if (state.failed) return;
                        match(input, 53, FOLLOW_53_in_assignmentOperator4991); if (state.failed) return;
                        match(input, 54, FOLLOW_54_in_assignmentOperator4993); if (state.failed) return;
                    }
                    break;
                case 11: // Java.g:1133:9 : '>' '>' '='
                    {
                        match(input, 56, FOLLOW_56_in_assignmentOperator5003); if (state.failed) return;
                        match(input, 56, FOLLOW_56_in_assignmentOperator5005); if (state.failed) return;
                        match(input, 54, FOLLOW_54_in_assignmentOperator5007); if (state.failed) return;
                    }
                    break;
                case 12: // Java.g:1134:9 : '>' '>' '>' '='
                    {
                        match(input, 56, FOLLOW_56_in_assignmentOperator5017); if (state.failed) return;
                        match(input, 56, FOLLOW_56_in_assignmentOperator5019); if (state.failed) return;
                        match(input, 56, FOLLOW_56_in_assignmentOperator5021); if (state.failed) return;
                        match(input, 54, FOLLOW_54_in_assignmentOperator5023); if (state.failed) return;
                    }
                    break;
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
            if (state.backtracking > 0) {
                memoize(input, 109, assignmentOperator_StartIndex);
            }
        }
    }
}
public class BigDecimalSQLTransform { /** * / * ( non - Javadoc ) * @ see com . abubusoft . kripton . processor . sqlite . transform . SQLTransform # generateWriteParam2WhereCondition ( com . squareup . javapoet . MethodSpec . Builder , com . abubusoft . kripton . processor . sqlite . model . SQLiteModelMethod , java . lang . String , com . squareup . javapoet . TypeName ) */ @ Override public void generateWriteParam2WhereCondition ( Builder methodBuilder , SQLiteModelMethod method , String paramName , TypeName paramTypeName ) { } }
methodBuilder . addCode ( "$L.toPlainString()" , paramName ) ;
public class CmsHtmlList { /** * Returns html code for printing the list . < p > * @ return html code */ public String printableHtml ( ) { } }
m_printable = true ; String html = listHtml ( ) ; m_printable = false ; return html ;
public class FutureUtils { /** * Run the given action after the completion of the given future . The given future can be * completed normally or exceptionally . In case of an exceptional completion the , the * action ' s exception will be added to the initial exception . * @ param future to wait for its completion * @ param runnable action which is triggered after the future ' s completion * @ return Future which is completed after the action has completed . This future can contain an exception , * if an error occurred in the given future or action . */ public static CompletableFuture < Void > runAfterwardsAsync ( CompletableFuture < ? > future , RunnableWithException runnable ) { } }
return runAfterwardsAsync ( future , runnable , ForkJoinPool . commonPool ( ) ) ;
public class D6CrudUpdateHelper {

    /**
     * Generate an UPDATE prepared SQL statement for the model's table
     * (note: despite earlier docs, this builds UPDATE, not INSERT).
     *
     * The statement has the form
     * {@code UPDATE <table> SET col1 = ?, col2 = ?, ... <where-clause>}.
     *
     * @param includeExcludeColumnNames optional include/exclude policy applied to the
     *                                  model's column set before building the SET list;
     *                                  may be {@code null} to use all columns
     * @return the prepared SQL string with '?' placeholders
     */
    String createUpdatePreparedSQLStatement(D6Inex includeExcludeColumnNames) {
        final Set<String> columnNameSet = getAllColumnNames();
        final StringGrabber sgSQL = new StringGrabber();
        // Get the table name from the model class' @DBTable annotation
        final DBTable table = mModelClazz.getAnnotation(DBTable.class);
        final String tableName = table.tableName();
        sgSQL.append("UPDATE " + tableName + " SET ");
        if (includeExcludeColumnNames != null) {
            // edit column names corresponding to include/exclude policy
            includeExcludeColumnNames.manipulate(columnNameSet);
        }
        // Scan all column names in the model class; each becomes "col = ?, "
        for (String columnName : columnNameSet) {
            sgSQL.append(columnName);
            sgSQL.append(" = ?, ");
        } // end of for (String columnName : columnNameSet)
        // Drop the trailing ", " left by the loop.
        // NOTE(review): if the column set is empty the guard still holds
        // ("UPDATE ... SET " is longer than 2) and this trims "T " from "SET " —
        // presumably the column set is never empty; confirm with callers.
        if (sgSQL.length() > 2) {
            sgSQL.removeTail(2);
        }
        // add SQL where clause
        sgSQL.append(" ");
        sgSQL.append(getWhereClause());
        final String sql = sgSQL.toString();
        log("#createUpdatePreparedSQLStatement sql=" + sql);
        return sql;
    }
}
public class Browser { /** * Determines the text type for the current node using the mimeType ( if present ) and the extension . * @ return the type of the text file ( script language ) or " " */ public static String getTextType ( String mimeType , String extension ) { } }
String textType = null ; if ( StringUtils . isNotBlank ( mimeType ) ) { textType = EDITOR_MODES . get ( mimeType ) ; if ( StringUtils . isBlank ( textType ) ) { String [ ] parts = StringUtils . split ( mimeType , '/' ) ; if ( parts . length > 1 ) { textType = EDITOR_MODES . get ( parts [ 1 ] ) ; } if ( StringUtils . isBlank ( textType ) ) { if ( StringUtils . isNotBlank ( extension ) ) { textType = EDITOR_MODES . get ( extension ) ; } if ( StringUtils . isBlank ( textType ) ) { textType = EDITOR_MODES . get ( parts [ 0 ] ) ; } } } } if ( StringUtils . isBlank ( textType ) ) { if ( StringUtils . isNotBlank ( extension ) ) { textType = EDITOR_MODES . get ( extension ) ; } } if ( textType == null ) { textType = "" ; } return textType ;
public class KunderaCriteriaBuilder {

    /**
     * Builds a {@code Count(...)} aggregate expression over the alias of the
     * supplied expression.
     *
     * @see javax.persistence.criteria.CriteriaBuilder#count(javax.persistence.criteria.Expression)
     */
    @Override
    public Expression<Long> count(Expression<?> arg0) {
        final String countClause = "Count(" + arg0.getAlias() + ")";
        return new AggregateExpression(arg0, countClause);
    }
}
public class FileAuthHandler { /** * getJWT should return the user JWT associated with this connection . * This can return null for challenge only authentication , but for account / user * JWT - based authentication you need to return the JWT bytes here . * @ return the user JWT */ public char [ ] getJWT ( ) { } }
try { char [ ] jwtChars = null ; String fileToUse = this . jwtFile ; if ( this . chainFile != null ) { fileToUse = this . chainFile ; } byte [ ] data = Files . readAllBytes ( Paths . get ( fileToUse ) ) ; ByteBuffer bb = ByteBuffer . wrap ( data ) ; CharBuffer chars = StandardCharsets . UTF_8 . decode ( bb ) ; jwtChars = this . extract ( chars , 1 ) ; // jwt is always first // Clear things up as best we can chars . clear ( ) ; for ( int i = 0 ; i < chars . capacity ( ) ; i ++ ) { chars . put ( '\0' ) ; } bb . clear ( ) ; for ( int i = 0 ; i < data . length ; i ++ ) { data [ i ] = 0 ; } return jwtChars ; } catch ( Exception exp ) { throw new IllegalStateException ( "problem reading jwt" , exp ) ; }
public class ZipUtil { /** * Extract the zip file to the destination , optionally only the matching files and renaming the files * @ param zipFilePath zip file path * @ param destDir destination directory to contain files * @ param filter filter to select matching files * @ param rename renamer to use * @ param copier streamCopier to use * @ throws IOException on io error */ public static void extractZip ( final String zipFilePath , final File destDir , final FilenameFilter filter , final renamer rename , final streamCopier copier ) throws IOException { } }
try ( final ZipFile jar = new ZipFile ( zipFilePath ) ) { extractZip ( jar . entries ( ) , jar :: getInputStream , destDir , filter , rename , copier ) ; }
public class NodeXmlHandler {

    /**
     * Finalizes a {@code Node} element: builds a {@code Node} from the field values
     * collected by earlier SAX callbacks, parses its comma-separated
     * {@code properties}, {@code status} ({@code key=value} pairs) and
     * {@code jobs} ({@code queueIndex/name} pairs), and appends it to the
     * collected node list. Non-{@code Node} elements are ignored.
     *
     * @see org.xml.sax.helpers.DefaultHandler#endElement(java.lang.String, java.lang.String, java.lang.String)
     */
    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
        if ("Node".equals(qName)) {
            // Core attributes were captured into fields by preceding callbacks.
            node = new Node(this.name, this.np, this.ntype, QueueState.fromString(this.state));
            // "properties" is a comma-separated list of property names (not trimmed).
            if (StringUtils.isNotBlank(properties)) {
                final String[] props = properties.split(",");
                for (final String prop : props) {
                    node.getProperties().add(prop);
                }
            }
            // "status" holds comma-separated key=value pairs; malformed entries
            // (no '=' or more than one) are silently skipped.
            if (StringUtils.isNotBlank(status)) {
                final String[] statuses = status.split(",");
                for (final String statuss : statuses) {
                    if (statuss.indexOf('=') > 0) {
                        String[] temp = statuss.split("=");
                        if (temp.length == 2)
                            node.getStatus().put(temp[0].trim(), temp[1].trim());
                    }
                }
            }
            // "jobs" holds comma-separated queueIndex/name pairs; entries that do
            // not split into exactly two parts are skipped.
            if (StringUtils.isNotBlank(jobs)) {
                final String[] jobses = jobs.split(",");
                for (String jobss : jobses) {
                    if (jobss.indexOf('/') > 0) {
                        String[] temp = jobss.split("/");
                        if (temp.length == 2) {
                            // NOTE(review): parseInt may throw NumberFormatException
                            // out of the SAX handler on a non-numeric index — confirm intended.
                            int index = Integer.parseInt(temp[0].trim());
                            // NOTE(review): name is not trimmed, unlike index/status — confirm intended.
                            String name = temp[1];
                            Job job = new Job();
                            job.setQueueIndex(index);
                            job.setName(name);
                            node.getJobs().add(job);
                        }
                    }
                }
            }
            this.nodes.add(node);
        }
    }
}
public class AmazonRDSClient { /** * Updates a manual DB snapshot , which can be encrypted or not encrypted , with a new engine version . * Amazon RDS supports upgrading DB snapshots for MySQL and Oracle . * @ param modifyDBSnapshotRequest * @ return Result of the ModifyDBSnapshot operation returned by the service . * @ throws DBSnapshotNotFoundException * < i > DBSnapshotIdentifier < / i > doesn ' t refer to an existing DB snapshot . * @ sample AmazonRDS . ModifyDBSnapshot * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / rds - 2014-10-31 / ModifyDBSnapshot " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DBSnapshot modifyDBSnapshot ( ModifyDBSnapshotRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeModifyDBSnapshot ( request ) ;
public class ge_scalarmult_base {

    /**
     * h = a * B
     * where a = a[0] + 256*a[1] + ... + 256^31 a[31]
     * B is the Ed25519 base point (x, 4/5) with x positive.
     *
     * Preconditions:
     *   a[31] &lt;= 127
     *
     * Fixed-base scalar multiplication in the ref10 style: the scalar is recoded
     * into 64 signed base-16 digits, which are combined with precomputed
     * multiples of B (fetched via select, presumably in constant time — the
     * lookup itself is not visible here).
     */
    public static void ge_scalarmult_base(ge_p3 h, byte[] a) {
        byte[] e = new byte[64];
        byte carry;
        ge_p1p1 r = new ge_p1p1();
        ge_p2 s = new ge_p2();
        ge_precomp t = new ge_precomp();
        int i;
        // Split each scalar byte into two base-16 digits, low nibble first.
        for (i = 0; i < 32; ++i) {
            e[2 * i + 0] = (byte) ((a[i] >>> 0) & 15);
            e[2 * i + 1] = (byte) ((a[i] >>> 4) & 15);
        }
        /* each e[i] is between 0 and 15 */
        /* e[63] is between 0 and 7 */
        // Recode into signed digits: push values above 8 up into the next digit
        // so every digit lands in [-8, 8], halving the precomputed table range.
        carry = 0;
        for (i = 0; i < 63; ++i) {
            e[i] += carry;
            carry = (byte) (e[i] + 8);
            carry >>= 4;
            e[i] -= carry << 4;
        }
        e[63] += carry;
        /* each e[i] is between -8 and 8 */
        // h := identity, then accumulate the odd-indexed digits:
        // after this loop h = sum over odd i of e[i] * 16^i * B (table supplies
        // e[i] * 16^(2*(i/2)) * B entries via select).
        ge_p3_0.ge_p3_0(h);
        for (i = 1; i < 64; i += 2) {
            select(t, i / 2, e[i]);
            ge_madd.ge_madd(r, h, t);
            ge_p1p1_to_p3.ge_p1p1_to_p3(h, r);
        }
        // Multiply the accumulated point by 16 (four doublings, converting
        // between the extended/projective representations between steps)...
        ge_p3_dbl.ge_p3_dbl(r, h);
        ge_p1p1_to_p2.ge_p1p1_to_p2(s, r);
        ge_p2_dbl.ge_p2_dbl(r, s);
        ge_p1p1_to_p2.ge_p1p1_to_p2(s, r);
        ge_p2_dbl.ge_p2_dbl(r, s);
        ge_p1p1_to_p2.ge_p1p1_to_p2(s, r);
        ge_p2_dbl.ge_p2_dbl(r, s);
        ge_p1p1_to_p3.ge_p1p1_to_p3(h, r);
        // ...then fold in the even-indexed digits to complete h = a * B.
        for (i = 0; i < 64; i += 2) {
            select(t, i / 2, e[i]);
            ge_madd.ge_madd(r, h, t);
            ge_p1p1_to_p3.ge_p1p1_to_p3(h, r);
        }
    }
}
public class CmsSecurityManager { /** * Changes the lock of a resource to the current user , that is " steals " the lock from another user . < p > * @ param context the current request context * @ param resource the resource to change the lock for * @ throws CmsException if something goes wrong * @ see org . opencms . file . types . I _ CmsResourceType # changeLock ( CmsObject , CmsSecurityManager , CmsResource ) */ public void changeLock ( CmsRequestContext context , CmsResource resource ) throws CmsException { } }
CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; checkOfflineProject ( dbc ) ; try { m_driverManager . changeLock ( dbc , resource , CmsLockType . EXCLUSIVE ) ; } catch ( Exception e ) { dbc . report ( null , Messages . get ( ) . container ( Messages . ERR_CHANGE_LOCK_OF_RESOURCE_2 , context . getSitePath ( resource ) , " - " + e . getMessage ( ) ) , e ) ; } finally { dbc . clear ( ) ; }
public class FluentSelect { /** * Deselect the option at the given index . This is done by examining the " index " attribute of an * element , and not merely by counting . * @ param index The option at this index will be deselected */ public FluentSelect deselectByIndex ( final int index ) { } }
executeAndWrapReThrowIfNeeded ( new DeselectByIndex ( index ) , Context . singular ( context , "deselectByIndex" , null , index ) , true ) ; return new FluentSelect ( super . delegate , currentElement . getFound ( ) , this . context , monitor , booleanInsteadOfNotFoundException ) ;
public class SRTServletResponse {

    /**
     * Defines the content length for this response. This call must be made only once.
     * Not setting the content length may cause significant performance degradation.
     *
     * <p>Ignored (with a FINE-level log) when called from an included servlet, per the
     * servlet include contract. Otherwise the length is pushed to the underlying
     * response (only while uncommitted), recorded locally, applied as the buffered
     * writer's limit, and also set as the Content-Length header.
     *
     * @param len The content length.
     */
    public void setContentLength(int len) {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) { // 306998.15
            logger.entering(CLASS_NAME, "setContentLength", "length --> " + String.valueOf(len) + " [" + this + "]");
        }
        // d151464 - check the include flag: included servlets may not set the length
        WebAppDispatcherContext dispatchContext = (WebAppDispatcherContext) getRequest().getWebAppDispatcherContext();
        if (dispatchContext.isInclude() == true) {
            // Inside an include: do nothing except log the illegal call.
            if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) { // 306998.15
                logger.logp(Level.FINE, CLASS_NAME, "setContentLength",
                    nls.getString("Illegal.from.included.servlet", "Illegal from included servlet"),
                    "setContentLength length --> " + String.valueOf(len)); // 311717
            }
        } else {
            // Only push to the underlying response while it is uncommitted.
            if (_response != null) {
                if (!_response.isCommitted()) {
                    _response.setContentLength(len);
                }
            }
            _contentLength = len;
            // LIBERTY _bufferedOut.setLimit(_contentLength = len);
            _bufferedWriter.setLimitLong(_contentLength);
            // still want to try to set the header even if the response is committed to throw the warning
            setIntHeader(HEADER_CONTENT_LENGTH, len);
        }
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) { // 306998.15
            logger.exiting(CLASS_NAME, "setContentLength");
        }
    }
}
public class ApproveSkillRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ApproveSkillRequest approveSkillRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( approveSkillRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( approveSkillRequest . getSkillId ( ) , SKILLID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }