signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Es6ExtractClasses { /** * Add at - constructor to the JSDoc of the given node . * @ param node */ private void addAtConstructor ( Node node ) { } }
JSDocInfoBuilder builder = JSDocInfoBuilder . maybeCopyFrom ( node . getJSDocInfo ( ) ) ; builder . recordConstructor ( ) ; node . setJSDocInfo ( builder . build ( ) ) ;
public class PrimaryBackupServerContext { /** * Returns the service context for the given request . */ private CompletableFuture < PrimaryBackupServiceContext > getService ( PrimitiveRequest request ) { } }
return services . computeIfAbsent ( request . primitive ( ) . name ( ) , n -> { PrimitiveType primitiveType = primitiveTypes . getPrimitiveType ( request . primitive ( ) . type ( ) ) ; PrimaryBackupServiceContext service = new PrimaryBackupServiceContext ( serverName , PrimitiveId . from ( request . primitive ( ) . name ( ) ) , primitiveType , request . primitive ( ) , threadContextFactory . createContext ( ) , clusterMembershipService , memberGroupService , protocol , primaryElection ) ; OrderedFuture < PrimaryBackupServiceContext > newOrderFuture = new OrderedFuture < > ( ) ; service . open ( ) . whenComplete ( ( v , e ) -> { if ( e != null ) { newOrderFuture . completeExceptionally ( e ) ; } else { newOrderFuture . complete ( service ) ; } } ) ; return newOrderFuture ; } ) ;
public class TreeTaggerWrapper { /** * tokenizes a given JCas object ' s document text using the chinese tokenization * script and adds the recognized tokens to the JCas object . * @ param jcas JCas object supplied by the pipeline */ private void tokenizeChinese ( JCas jcas ) { } }
try { // read tokenized text to add tokens to the jcas Process proc = ttprops . getChineseTokenizationProcess ( ) ; Logger . printDetail ( component , "Chinese tokenization: " + ttprops . chineseTokenizerPath ) ; BufferedReader in = new BufferedReader ( new InputStreamReader ( proc . getInputStream ( ) , "UTF-8" ) ) ; BufferedWriter out = new BufferedWriter ( new OutputStreamWriter ( proc . getOutputStream ( ) , "UTF-8" ) ) ; Integer tokenOffset = 0 ; // loop through all the lines in the stdout output String [ ] inSplits = jcas . getDocumentText ( ) . split ( "[\\r\\n]+" ) ; for ( String inSplit : inSplits ) { out . write ( inSplit ) ; out . newLine ( ) ; out . flush ( ) ; // do one initial read String s = in . readLine ( ) ; do { // break out of the loop if we ' ve read a null if ( s == null ) break ; String [ ] outSplits = s . split ( "\\s+" ) ; for ( String tok : outSplits ) { if ( jcas . getDocumentText ( ) . indexOf ( tok , tokenOffset ) < 0 ) throw new RuntimeException ( "Could not find token " + tok + " in JCas after tokenizing with Chinese tokenization script." ) ; // create tokens and add them to the jcas ' s indexes . Token newToken = new Token ( jcas ) ; newToken . setBegin ( jcas . getDocumentText ( ) . indexOf ( tok , tokenOffset ) ) ; newToken . setEnd ( newToken . getBegin ( ) + tok . length ( ) ) ; newToken . addToIndexes ( ) ; tokenOffset = newToken . getEnd ( ) ; } // break out of the loop if the next read will block if ( ! in . ready ( ) ) break ; s = in . readLine ( ) ; } while ( true ) ; } // clean up in . close ( ) ; proc . destroy ( ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; }
public class ChannelUtils { /** * Define a new endpoint definition using the input name and list of * properties . * @ param name * @ param config * @ return EndPointInfo * @ throws IllegalArgumentException if input values are incorrect */ private static EndPointInfo defineEndPoint ( EndPointMgr epm , String name , String [ ] config ) { } }
String host = null ; String port = null ; for ( int i = 0 ; i < config . length ; i ++ ) { String key = ChannelUtils . extractKey ( config [ i ] ) ; if ( "host" . equalsIgnoreCase ( key ) ) { host = ChannelUtils . extractValue ( config [ i ] ) ; } else if ( "port" . equalsIgnoreCase ( key ) ) { port = ChannelUtils . extractValue ( config [ i ] ) ; } } return epm . defineEndPoint ( name , host , Integer . parseInt ( port ) ) ;
public class JsonConfig { /** * Removes a JsonValueProcessor . < br > * [ Java - & gt ; JSON ] * @ param beanClass the class to which the property may belong * @ param propertyType the type of the property */ public void unregisterJsonValueProcessor ( Class beanClass , Class propertyType ) { } }
if ( beanClass != null && propertyType != null ) { beanTypeMap . remove ( beanClass , propertyType ) ; }
public class MainFrame { /** * GEN - LAST : event _ btExitActionPerformed */ private void btLaunchMouseEntered ( java . awt . event . MouseEvent evt ) // GEN - FIRST : event _ btLaunchMouseEntered { } }
// GEN - HEADEREND : event _ btLaunchMouseEntered this . setCursor ( new Cursor ( Cursor . HAND_CURSOR ) ) ;
public class ProductPartitionNode { /** * Puts the specified key / value pair in the map of custom parameters . * @ throws IllegalStateException if this node is not a biddable UNIT node */ public ProductPartitionNode putCustomParameter ( String key , String value ) { } }
if ( ! nodeState . supportsCustomParameters ( ) ) { throw new IllegalStateException ( String . format ( "Cannot set custom parameters on a %s node" , nodeState . getNodeType ( ) ) ) ; } this . nodeState . getCustomParams ( ) . put ( key , value ) ; return this ;
public class SeleniumSpec { /** * Searchs for two webelements dragging the first one to the second * @ param source * @ param destination * @ throws IllegalAccessException * @ throws IllegalArgumentException * @ throws SecurityException * @ throws NoSuchFieldException * @ throws ClassNotFoundException */ @ When ( "^I drag '([^:]*?):(.+?)' and drop it to '([^:]*?):(.+?)'$" ) public void seleniumDrag ( String smethod , String source , String dmethod , String destination ) throws ClassNotFoundException , NoSuchFieldException , SecurityException , IllegalArgumentException , IllegalAccessException { } }
Actions builder = new Actions ( commonspec . getDriver ( ) ) ; List < WebElement > sourceElement = commonspec . locateElement ( smethod , source , 1 ) ; List < WebElement > destinationElement = commonspec . locateElement ( dmethod , destination , 1 ) ; builder . dragAndDrop ( sourceElement . get ( 0 ) , destinationElement . get ( 0 ) ) . perform ( ) ;
public class DependencyResolver { /** * Configures the component with specified parameters . * @ param config configuration parameters to set . * @ throws ConfigException when configuration is wrong . * @ see ConfigParams */ @ Override public void configure ( ConfigParams config ) throws ConfigException { } }
ConfigParams dependencies = config . getSection ( "dependencies" ) ; for ( String name : dependencies . keySet ( ) ) { String locator = dependencies . get ( name ) ; if ( locator == null ) continue ; try { Descriptor descriptor = Descriptor . fromString ( locator ) ; if ( descriptor != null ) _dependencies . put ( name , descriptor ) ; else _dependencies . put ( name , locator ) ; } catch ( Exception ex ) { _dependencies . put ( name , locator ) ; } }
public class ListRestoreJobsResult { /** * An array of objects that contain detailed information about jobs to restore saved resources . * @ param restoreJobs * An array of objects that contain detailed information about jobs to restore saved resources . */ public void setRestoreJobs ( java . util . Collection < RestoreJobsListMember > restoreJobs ) { } }
if ( restoreJobs == null ) { this . restoreJobs = null ; return ; } this . restoreJobs = new java . util . ArrayList < RestoreJobsListMember > ( restoreJobs ) ;
public class KXmlParser { /** * Read an element declaration . This contains a name and a content spec . * < ! ELEMENT foo EMPTY > * < ! ELEMENT foo ( bar ? , ( baz | quux ) ) > * < ! ELEMENT foo ( # PCDATA | bar ) * > */ private void readElementDeclaration ( ) throws IOException , XmlPullParserException { } }
read ( START_ELEMENT ) ; skip ( ) ; readName ( ) ; readContentSpec ( ) ; skip ( ) ; read ( '>' ) ;
public class SpiExample { /** * Read data via SPI bus from MCP3002 chip . * @ throws IOException */ public static void read ( ) throws IOException , InterruptedException { } }
for ( short channel = 0 ; channel < ADC_CHANNEL_COUNT ; channel ++ ) { int conversion_value = getConversionValue ( channel ) ; console . print ( String . format ( " | %04d" , conversion_value ) ) ; // print 4 digits with leading zeros } console . print ( " |\r" ) ; Thread . sleep ( 250 ) ;
public class AbstractSEPAGV { /** * This is needed for verifyConstraints ( ) . Because verifyConstraints ( ) tries * to read the lowlevel - values for each constraint , the lowlevel - values for * sepa . xxx would always be empty ( because they do not exist in hbci * messages ) . So we read the sepa lowlevel - values from the special sepa * structure instead from the lowlevel params for the message * @ param key * @ return the lowlevel param . */ public String getLowlevelParam ( String key ) { } }
String result ; String intern = getName ( ) + ".sepa." ; if ( key . startsWith ( intern ) ) { String realKey = key . substring ( intern . length ( ) ) ; result = getPainParam ( realKey ) ; } else { result = super . getLowlevelParam ( key ) ; } return result ;
public class ServiceSpecification { /** * Generates a new { @ link ServiceSpecification } from the specified { @ link ServiceInfo } s and * the factory { @ link Function } that creates { @ link NamedTypeInfo } s for the enum , struct or exception types * referred by the specified { @ link ServiceInfo } s . */ public static ServiceSpecification generate ( Iterable < ServiceInfo > services , Function < TypeSignature , ? extends NamedTypeInfo > namedTypeInfoFactory ) { } }
if ( Iterables . isEmpty ( services ) ) { return emptyServiceSpecification ; } // Collect all named types referred by the services . final Set < TypeSignature > namedTypes = Streams . stream ( services ) . flatMap ( s -> s . findNamedTypes ( ) . stream ( ) ) . collect ( toImmutableSortedSet ( comparing ( TypeSignature :: name ) ) ) ; final Map < String , EnumInfo > enums = new HashMap < > ( ) ; final Map < String , StructInfo > structs = new HashMap < > ( ) ; final Map < String , ExceptionInfo > exceptions = new HashMap < > ( ) ; generateNamedTypeInfos ( namedTypeInfoFactory , enums , structs , exceptions , namedTypes ) ; return new ServiceSpecification ( services , enums . values ( ) , structs . values ( ) , exceptions . values ( ) ) ;
public class ConcurrentLinkedList { /** * ( non - Javadoc ) * @ see ws . sib . objectManager . List # listIterator ( ) */ public ListIterator listIterator ( ) throws ObjectManagerException { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "listIterator" ) ; ListIterator listIterator = subList ( null , null ) . listIterator ( ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "listIterator" , new Object [ ] { listIterator } ) ; return listIterator ;
public class RtcpPacketFactory { /** * Builds a packet containing an RTCP Sender Report . * @ param statistics * The statistics of the RTP session * @ return The RTCP packet */ private static RtcpSenderReport buildSenderReport ( RtpStatistics statistics , boolean padding ) { } }
/* * 0 1 2 3 * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 * header | V = 2 | P | RC | PT = SR = 200 | length | * | SSRC of sender | * sender | NTP timestamp , most significant word | * info + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + * | NTP timestamp , least significant word | * | RTP timestamp | * | sender ' s packet count | * | sender ' s octet count | * report | SSRC _ 1 ( SSRC of first source ) | * block + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + * 1 | fraction lost | cumulative number of packets lost | * | extended highest sequence number received | * | interarrival jitter | * | last SR ( LSR ) | * | delay since last SR ( DLSR ) | * report | SSRC _ 2 ( SSRC of second source ) | * block + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + * | profile - specific extensions | */ long ssrc = statistics . getSsrc ( ) ; long currentTime = statistics . getCurrentTime ( ) ; TimeStamp ntpTs = new TimeStamp ( new Date ( currentTime ) ) ; long ntpSec = ntpTs . getSeconds ( ) ; long ntpFrac = ntpTs . getFraction ( ) ; long elapsedTime = statistics . getCurrentTime ( ) - statistics . getRtpSentOn ( ) ; long rtpTs = statistics . getRtpTimestamp ( ) + statistics . getRtpTime ( elapsedTime ) ; long psent = statistics . getRtpPacketsSent ( ) ; long osent = statistics . getRtpOctetsSent ( ) ; RtcpSenderReport senderReport = new RtcpSenderReport ( padding , ssrc , ntpSec , ntpFrac , rtpTs , psent , osent ) ; // Add receiver reports for each registered member List < Long > members = statistics . getMembersList ( ) ; for ( Long memberSsrc : members ) { if ( ssrc != memberSsrc ) { RtpMember memberStats = statistics . getMember ( memberSsrc . 
longValue ( ) ) ; RtcpReportBlock rcvrReport = buildSubReceiverReport ( memberStats ) ; senderReport . addReceiverReport ( rcvrReport ) ; } } return senderReport ;
public class HashCodeBuilder { /** * Uses reflection to build a valid hash code from the fields of { @ code object } . * It uses < code > AccessibleObject . setAccessible < / code > to gain access to private fields . This means that it will * throw a security exception if run under a security manager , if the permissions are not set up correctly . It is * also not as efficient as testing explicitly . * Transient members will be not be used , as they are likely derived fields , and not part of the value of the * < code > Object < / code > . * Static fields will not be tested . Superclass fields will be included . * Two randomly chosen , non - zero , odd numbers must be passed in . Ideally these should be different for each class , * however this is not vital . Prime numbers are preferred , especially for the multiplier . * @ param initialNonZeroOddNumber * a non - zero , odd number used as the initial value . This will be the returned * value if no fields are found to include in the hash code * @ param multiplierNonZeroOddNumber * a non - zero , odd number used as the multiplier * @ param object * the Object to create a < code > hashCode < / code > for * @ return int hash code * @ throws IllegalArgumentException * if the Object is < code > null < / code > * @ throws IllegalArgumentException * if the number is zero or even * @ see HashCodeExclude */ @ GwtIncompatible ( "incompatible method" ) public static int reflectionHashCode ( final int initialNonZeroOddNumber , final int multiplierNonZeroOddNumber , final Object object ) { } }
return reflectionHashCode ( initialNonZeroOddNumber , multiplierNonZeroOddNumber , object , false , null ) ;
public class JDBCStorableGenerator { /** * Generates code to call a PreparedStatement . setXxx ( int , Xxx ) method , with * the value of the given property . Assumes that PreparedStatement and int * index are on the stack . * If the property is a Lob , then pass in the optional lobTooLargeVar to * track if it was too large to insert / update . The type of lobTooLargeVar * must be the carbonado lob type . At runtime , if the variable ' s value is * not null , then lob was too large to insert . The value of the variable is * the original lob . An update statement needs to be issued after the load * to insert / update the large value . * @ param mode one of NORMAL , NOT _ NULL , INITIAL _ VERSION or INCREMENT _ VERSION * @ param instanceVar when null , assume properties are contained in * " this " . Otherwise , invoke property access methods on storable referenced * in var . * @ param lobArrayVar optional , used for lob properties * @ param lobIndex optional , used for lob properties */ private void setPreparedStatementValue ( CodeBuilder b , JDBCStorableProperty < ? > property , int mode , LocalVariable instanceVar , LocalVariable lobArrayVar , Integer lobIndex ) throws SupportException { } }
Class psClass = property . getPreparedStatementSetMethod ( ) . getParameterTypes ( ) [ 1 ] ; TypeDesc psType = TypeDesc . forClass ( psClass ) ; TypeDesc propertyType = TypeDesc . forClass ( property . getType ( ) ) ; StorablePropertyAdapter adapter = property . getAppliedAdapter ( ) ; if ( mode != INITIAL_VERSION ) { // Load storable to extract property value from . if ( instanceVar == null ) { b . loadThis ( ) ; } else { b . loadLocal ( instanceVar ) ; } } TypeDesc fromType ; if ( adapter == null ) { if ( mode != INITIAL_VERSION ) { // Get protected field directly , since no adapter . if ( instanceVar == null ) { b . loadField ( property . getName ( ) , propertyType ) ; } else { b . loadField ( instanceVar . getType ( ) , property . getName ( ) , propertyType ) ; } } fromType = propertyType ; } else { Class toClass = psClass ; if ( java . sql . Blob . class . isAssignableFrom ( toClass ) ) { toClass = com . amazon . carbonado . lob . Blob . class ; } else if ( java . sql . Clob . class . isAssignableFrom ( toClass ) ) { toClass = com . amazon . carbonado . lob . Clob . class ; } Method adaptMethod = adapter . findAdaptMethod ( property . getType ( ) , toClass ) ; if ( adaptMethod == null ) { if ( toClass == String . class ) { // Check if special case for converting character to String . adaptMethod = adapter . findAdaptMethod ( property . getType ( ) , char . class ) ; if ( adaptMethod == null ) { adaptMethod = adapter . findAdaptMethod ( property . getType ( ) , Character . class ) ; } } if ( adaptMethod == null ) { throw new SupportException ( "Unable to adapt " + property . getType ( ) + " to " + toClass . getName ( ) ) ; } } TypeDesc adaptType = TypeDesc . forClass ( adaptMethod . getReturnType ( ) ) ; if ( mode != INITIAL_VERSION ) { // Invoke special inherited protected method that gets the field // and invokes the adapter . Method was generated by // StorableGenerator . String methodName = property . 
getReadMethodName ( ) + '$' ; if ( instanceVar == null ) { b . invokeVirtual ( methodName , adaptType , null ) ; } else { b . invokeVirtual ( instanceVar . getType ( ) , methodName , adaptType , null ) ; } } fromType = adaptType ; } Label done = b . createLabel ( ) ; if ( mode == INITIAL_VERSION ) { CodeBuilderUtil . initialVersion ( b , fromType , 1 ) ; } else if ( mode == INCREMENT_VERSION ) { CodeBuilderUtil . incrementVersion ( b , fromType ) ; } else if ( ! fromType . isPrimitive ( ) && mode != NOT_NULL ) { // Handle case where property value is null . b . dup ( ) ; Label notNull = b . createLabel ( ) ; b . ifNullBranch ( notNull , false ) ; // Invoke setNull method instead . b . pop ( ) ; // discard duplicate null . b . loadConstant ( property . getDataType ( ) ) ; b . invokeInterface ( TypeDesc . forClass ( PreparedStatement . class ) , "setNull" , null , new TypeDesc [ ] { TypeDesc . INT , TypeDesc . INT } ) ; b . branch ( done ) ; notNull . setLocation ( ) ; } if ( Lob . class . isAssignableFrom ( fromType . toClass ( ) ) ) { // Run special conversion . LocalVariable lobVar = b . createLocalVariable ( null , fromType ) ; b . storeLocal ( lobVar ) ; LocalVariable columnVar = b . createLocalVariable ( null , TypeDesc . INT ) ; b . storeLocal ( columnVar ) ; LocalVariable psVar = b . createLocalVariable ( "ps" , TypeDesc . forClass ( PreparedStatement . class ) ) ; b . storeLocal ( psVar ) ; if ( lobArrayVar != null && lobIndex != null ) { // Prepare for update result . If too large , then array entry is not null . b . loadLocal ( lobArrayVar ) ; b . loadConstant ( lobIndex ) ; } pushJDBCSupport ( b ) ; b . loadLocal ( psVar ) ; b . loadLocal ( columnVar ) ; b . loadLocal ( lobVar ) ; // Stack looks like this : JDBCSupport , PreparedStatement , int ( column ) , Lob Method setValueMethod ; try { String name = fromType . toClass ( ) . getName ( ) ; name = "set" + name . substring ( name . lastIndexOf ( '.' ) + 1 ) + "Value" ; setValueMethod = JDBCSupport . 
class . getMethod ( name , PreparedStatement . class , int . class , fromType . toClass ( ) ) ; } catch ( NoSuchMethodException e ) { throw new UndeclaredThrowableException ( e ) ; } b . invoke ( setValueMethod ) ; if ( lobArrayVar == null || lobIndex == null ) { b . pop ( ) ; } else { b . storeToArray ( TypeDesc . OBJECT ) ; } } else { if ( psType == TypeDesc . STRING && fromType . toPrimitiveType ( ) == TypeDesc . CHAR ) { // Special case for converting character to String . b . convert ( fromType , fromType . toPrimitiveType ( ) ) ; b . invokeStatic ( String . class . getName ( ) , "valueOf" , TypeDesc . STRING , new TypeDesc [ ] { TypeDesc . CHAR } ) ; } else { b . convert ( fromType , psType ) ; } b . invoke ( property . getPreparedStatementSetMethod ( ) ) ; } done . setLocation ( ) ;
public class GroovyMain { /** * Parse the command line . * @ param options the options parser . * @ param args the command line args . * @ return parsed command line . * @ throws ParseException if there was a problem . */ private static CommandLine parseCommandLine ( Options options , String [ ] args ) throws ParseException { } }
CommandLineParser parser = new GroovyInternalPosixParser ( ) ; return parser . parse ( options , args , true ) ;
public class CoverageUtilities { /** * Transforms an array of values into a { @ link WritableRaster } . * @ param array the values to transform . * @ param divide the factor by which to divide the values . * @ param width the width of the resulting image . * @ param height the height of the resulting image . * @ return the raster . */ public static WritableRaster doubleArray2WritableRaster ( double [ ] array , int width , int height ) { } }
WritableRaster writableRaster = createWritableRaster ( width , height , null , null , null ) ; int index = 0 ; ; for ( int x = 0 ; x < width ; x ++ ) { for ( int y = 0 ; y < height ; y ++ ) { writableRaster . setSample ( x , y , 0 , array [ index ++ ] ) ; } } return writableRaster ;
public class physical_disk { /** * < pre > * Performs generic data validation for the operation to be performed * < / pre > */ protected void validate ( String operationType ) throws Exception { } }
super . validate ( operationType ) ; MPSString id_validator = new MPSString ( ) ; id_validator . validate ( operationType , id , "\"id\"" ) ; MPSInt adapter_id_validator = new MPSInt ( ) ; adapter_id_validator . validate ( operationType , adapter_id , "\"adapter_id\"" ) ; MPSInt enclosure_validator = new MPSInt ( ) ; enclosure_validator . validate ( operationType , enclosure , "\"enclosure\"" ) ; MPSInt slot_validator = new MPSInt ( ) ; slot_validator . validate ( operationType , slot , "\"slot\"" ) ; MPSInt deviceid_validator = new MPSInt ( ) ; deviceid_validator . validate ( operationType , deviceid , "\"deviceid\"" ) ; MPSString wwn_validator = new MPSString ( ) ; wwn_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; wwn_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; wwn_validator . validate ( operationType , wwn , "\"wwn\"" ) ; MPSString mec_validator = new MPSString ( ) ; mec_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; mec_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; mec_validator . validate ( operationType , mec , "\"mec\"" ) ; MPSString oec_validator = new MPSString ( ) ; oec_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; oec_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; oec_validator . validate ( operationType , oec , "\"oec\"" ) ; MPSString size_validator = new MPSString ( ) ; size_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; size_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; size_validator . validate ( operationType , size , "\"size\"" ) ; MPSString firmwarestate_validator = new MPSString ( ) ; firmwarestate_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; firmwarestate_validator . setConstraintMinStrLen ( MPSConstants . 
GENERIC_CONSTRAINT , 1 ) ; firmwarestate_validator . validate ( operationType , firmwarestate , "\"firmwarestate\"" ) ; MPSString foreignstate_validator = new MPSString ( ) ; foreignstate_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; foreignstate_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; foreignstate_validator . validate ( operationType , foreignstate , "\"foreignstate\"" ) ; MPSString mediatype_validator = new MPSString ( ) ; mediatype_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; mediatype_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; mediatype_validator . validate ( operationType , mediatype , "\"mediatype\"" ) ; MPSString inquiry_validator = new MPSString ( ) ; inquiry_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; inquiry_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; inquiry_validator . validate ( operationType , inquiry , "\"inquiry\"" ) ; MPSIPAddress host_ip_address_validator = new MPSIPAddress ( ) ; host_ip_address_validator . validate ( operationType , host_ip_address , "\"host_ip_address\"" ) ; MPSBoolean is_used_validator = new MPSBoolean ( ) ; is_used_validator . validate ( operationType , is_used , "\"is_used\"" ) ; MPSString logical_disk_name_validator = new MPSString ( ) ; logical_disk_name_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; logical_disk_name_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; logical_disk_name_validator . validate ( operationType , logical_disk_name , "\"logical_disk_name\"" ) ; MPSInt ld_number_validator = new MPSInt ( ) ; ld_number_validator . validate ( operationType , ld_number , "\"ld_number\"" ) ; MPSInt disk_index_validator = new MPSInt ( ) ; disk_index_validator . validate ( operationType , disk_index , "\"disk_index\"" ) ;
public class MaterializedViewSupervisor { /** * Find intervals in which derived dataSource should rebuild the segments . * Choose the latest intervals to create new HadoopIndexTask and submit it . */ @ VisibleForTesting void checkSegmentsAndSubmitTasks ( ) { } }
synchronized ( taskLock ) { List < Interval > intervalsToRemove = new ArrayList < > ( ) ; for ( Map . Entry < Interval , HadoopIndexTask > entry : runningTasks . entrySet ( ) ) { Optional < TaskStatus > taskStatus = taskStorage . getStatus ( entry . getValue ( ) . getId ( ) ) ; if ( ! taskStatus . isPresent ( ) || ! taskStatus . get ( ) . isRunnable ( ) ) { intervalsToRemove . add ( entry . getKey ( ) ) ; } } for ( Interval interval : intervalsToRemove ) { runningTasks . remove ( interval ) ; runningVersion . remove ( interval ) ; } if ( runningTasks . size ( ) == maxTaskCount ) { // if the number of running tasks reach the max task count , supervisor won ' t submit new tasks . return ; } Pair < SortedMap < Interval , String > , Map < Interval , List < DataSegment > > > toBuildIntervalAndBaseSegments = checkSegments ( ) ; SortedMap < Interval , String > sortedToBuildVersion = toBuildIntervalAndBaseSegments . lhs ; Map < Interval , List < DataSegment > > baseSegments = toBuildIntervalAndBaseSegments . rhs ; missInterval = sortedToBuildVersion . keySet ( ) ; submitTasks ( sortedToBuildVersion , baseSegments ) ; }
public class GIMDImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case AfplibPackage . GIMD__DATA : setDATA ( DATA_EDEFAULT ) ; return ; } super . eUnset ( featureID ) ;
public class SftpFileAttributes { /** * Set the UID of the owner . * @ param uid */ public void setUID ( String uid ) { } }
if ( version > 3 ) { flags |= SSH_FILEXFER_ATTR_OWNERGROUP ; } else flags |= SSH_FILEXFER_ATTR_UIDGID ; this . uid = uid ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AbstractTimeReferenceSystemType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link AbstractTimeReferenceSystemType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "_TimeReferenceSystem" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "Definition" ) public JAXBElement < AbstractTimeReferenceSystemType > create_TimeReferenceSystem ( AbstractTimeReferenceSystemType value ) { } }
return new JAXBElement < AbstractTimeReferenceSystemType > ( __TimeReferenceSystem_QNAME , AbstractTimeReferenceSystemType . class , null , value ) ;
public class JSConsumerSet { /** * Determine the index of the getCursor to use based on the classification of a * message . * @ param msg * @ return */ public int getGetCursorIndex ( SIMPMessage msg ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getGetCursorIndex" ) ; // The zeroth index is reserved for non - classified messages int classPos = 0 ; synchronized ( classifications ) { if ( classifications . getNumberOfClasses ( ) > 0 ) { // Need to get the classification out of the message String keyClassification = msg . getMessageControlClassification ( true ) ; // In the special case where the weighting for the classification in the // message was zero , we use the Default cursor if ( keyClassification != null && classifications . getWeight ( keyClassification ) > 0 ) // Get the position of the classification classPos = classifications . getPosition ( keyClassification ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getGetCursorIndex" , classPos ) ; return classPos ;
public class Sign1Message { /** * Validate the signature on the message using the passed in key . * @ param cnKey key to use for validation * @ return true if the signature validates * @ throws CoseException Errors generated by the COSE module */ public boolean validate ( OneKey cnKey ) throws CoseException { } }
CBORObject obj = CBORObject . NewArray ( ) ; obj . Add ( contextString ) ; if ( objProtected . size ( ) > 0 ) obj . Add ( rgbProtected ) ; else obj . Add ( CBORObject . FromObject ( new byte [ 0 ] ) ) ; obj . Add ( externalData ) ; obj . Add ( rgbContent ) ; return validateSignature ( obj . EncodeToBytes ( ) , rgbSignature , cnKey ) ;
public class AbstractManifestServlet { /** * Sub - classes can override this method to perform handle the inability to find a matching permutation . In which * case the method should return true if the response has been handled . */ protected boolean handleUnmatchedRequest ( final HttpServletRequest request , final HttpServletResponse response , final String moduleName , final String baseUrl , final List < BindingProperty > computedBindings ) throws ServletException , IOException { } }
return false ;
public class HtmlSerialFieldWriter { /** * Add the tag information for this member . * @ param field the field to document . * @ param contentTree the tree to which the member tags info will be added */ public void addMemberTags ( FieldDoc field , Content contentTree ) { } }
Content tagContent = new ContentBuilder ( ) ; TagletWriter . genTagOuput ( configuration . tagletManager , field , configuration . tagletManager . getCustomTaglets ( field ) , writer . getTagletWriterInstance ( false ) , tagContent ) ; Content dlTags = new HtmlTree ( HtmlTag . DL ) ; dlTags . addContent ( tagContent ) ; contentTree . addContent ( dlTags ) ; // TODO : what if empty ?
public class NameSpaceBinderImpl {
    /**
     * Store the binding information for later use.
     *
     * @see com.ibm.ws.ejbcontainer.runtime.NameSpaceBinder#createBindingObject(com.ibm.ejs.container.HomeRecord,
     *      com.ibm.websphere.csi.HomeWrapperSet, java.lang.String, boolean, boolean)
     */
    @Override
    public EJBBinding createBindingObject(HomeRecord hr, HomeWrapperSet homeSet, String interfaceName,
            int interfaceIndex, boolean local) {
        // NOTE(review): homeSet is accepted for interface compatibility but is not
        // used by this implementation.
        return new EJBBinding(hr, interfaceName, interfaceIndex, local);
    }
}
public class DistributionPointFetcher {
    /**
     * Append relative name to the issuer name and return a new
     * GeneralNames object.
     *
     * @param issuer the base distinguished name
     * @param rdn the relative distinguished name to append
     * @return a GeneralNames containing the single combined directory name
     * @throws IOException on encoding failure
     */
    private static GeneralNames getFullNames(X500Name issuer, RDN rdn) throws IOException {
        // Copy the issuer's RDN sequence and append the relative name at the end.
        List<RDN> rdns = new ArrayList<>(issuer.rdns());
        rdns.add(rdn);
        X500Name fullName = new X500Name(rdns.toArray(new RDN[0]));
        GeneralNames fullNames = new GeneralNames();
        fullNames.add(new GeneralName(fullName));
        return fullNames;
    }
}
public class Form {
    /**
     * Render the end of this form: the opening and closing form element, hidden
     * parameters (transaction token, URL parameters, extra hidden params), the body
     * content, and any focus / submit JavaScript.
     *
     * @return {@code EVAL_PAGE} to continue evaluating the page
     * @throws JspException if a JSP exception has occurred
     */
    public int doEndTag() throws JspException {
        if (hasErrors()) return reportAndExit(EVAL_PAGE);
        String idScript = null;
        // Create an appropriate "form" element based on our parameters
        HttpServletRequest request = (HttpServletRequest) pageContext.getRequest();
        ServletContext servletContext = pageContext.getServletContext();
        // if we have an Id or a tag is forcing the name, then set the state and generate
        // the JavaScript
        if (_state.id != null || _setRealName) {
            String id = _state.id;
            _state.name = _realName;
            _state.id = _realName;
            idScript = renderNameAndId(request, id);
        }
        // Default the HTTP method to POST when none was specified.
        if (_state.method == null) _state.method = FORM_POST;
        // Encode the action
        // If the rewritten form action contains request parameters, turn them into hidden fields --
        // it's not legal to include them in the action URI on a GET.
        HttpServletResponse response = (HttpServletResponse) pageContext.getResponse();
        if (_actionUrl != null) {
            _state.action = response.encodeURL(_actionUrl);
        }
        WriteRenderAppender writer = new WriteRenderAppender(pageContext);
        TagRenderingBase br = TagRenderingBase.Factory.getRendering(TagRenderingBase.FORM_TAG, request);
        br.doStartTag(writer, _state);
        // If the action we're submitting to is checking for double-submits, save a token in the session.
        // This will be written out as a hidden param (below), and will be checked in PageFlowRequestProcessor.
        String token = PageFlowTagUtils.getToken(request, _mapping);
        // Add a transaction token (if present in our session)
        HttpSession session = pageContext.getSession();
        if (session != null) {
            if (token == null) {
                token = (String) session.getAttribute(Globals.TRANSACTION_TOKEN_KEY);
            }
            if (token != null) {
                String name = URLRewriterService.getNamePrefix(servletContext, request, Constants.TOKEN_KEY)
                        + Constants.TOKEN_KEY;
                writeHiddenParam(name, token, writer, request, true);
            }
        }
        // add a hidden value for each parameter (multi-valued parameters emit one
        // hidden field per value)
        if (_params != null) {
            Iterator paramKeys = _params.keySet().iterator();
            while (paramKeys.hasNext()) {
                Object paramKey = paramKeys.next();
                Object paramValue = _params.get(paramKey);
                if (paramValue instanceof String[]) {
                    String[] paramValArray = (String[]) paramValue;
                    for (int i = 0; i < paramValArray.length; i++) {
                        String name = paramKey.toString();
                        String paramName = URLRewriterService.getNamePrefix(servletContext, request, name) + name;
                        writeHiddenParam(paramName, paramValArray[i], writer, request, true);
                    }
                } else {
                    String name = paramKey.toString();
                    String paramName = URLRewriterService.getNamePrefix(servletContext, request, name) + name;
                    writeHiddenParam(paramName, paramValue.toString(), writer, request, true);
                }
            }
        }
        // add the extra hidden parameters
        if (_extraHiddenParams != null && _extraHiddenParams.size() > 0) {
            for (Iterator i = _extraHiddenParams.keySet().iterator(); i.hasNext();) {
                String name = (String) i.next();
                for (Iterator j = ((List) _extraHiddenParams.get(name)).iterator(); j.hasNext();) {
                    String value = (String) j.next();
                    writeHiddenParam(name, value, writer, request, true);
                }
            }
        }
        // add the body content
        if (_text != null) write(_text);
        // Remove the page scope attributes we created
        pageContext.removeAttribute(Constants.BEAN_KEY, PageContext.REQUEST_SCOPE);
        pageContext.removeAttribute(Constants.FORM_KEY, PageContext.REQUEST_SCOPE);
        ImplicitObjectUtil.unloadActionForm(pageContext);
        // Render a tag representing the end of our current form
        br.doEndTag(writer);
        // Render JavaScript to set the input focus if required
        if ((_focus != null) && (_focusMap != null)) {
            String focusName = (String) _focusMap.get(_focus);
            if (focusName != null) {
                String formName = _realName;
                ScriptRequestState srs = ScriptRequestState.getScriptRequestState(request);
                srs.writeFeature(getScriptReporter(), writer, CoreScriptFeature.SET_FOCUS, false, true,
                        new Object[]{formName, focusName});
            }
        }
        if (_formSubmit) {
            ScriptRequestState srs = ScriptRequestState.getScriptRequestState(request);
            srs.writeFeature(getScriptReporter(), writer, CoreScriptFeature.ANCHOR_SUBMIT, true, false, null);
        }
        // output any generated javascript
        if (idScript != null) write(idScript);
        localRelease();
        return EVAL_PAGE;
    }
}
public class FacesConfigTypeImpl {
    /**
     * If not already created, a new <code>lifecycle</code> element will be created and returned.
     * Otherwise, the first existing <code>lifecycle</code> element will be returned.
     *
     * @return the instance defined for the element <code>lifecycle</code>
     */
    public FacesConfigLifecycleType<FacesConfigType<T>> getOrCreateLifecycle() {
        // Reuse the first existing <lifecycle> child if one is present.
        List<Node> nodeList = childNode.get("lifecycle");
        if (nodeList != null && nodeList.size() > 0) {
            return new FacesConfigLifecycleTypeImpl<FacesConfigType<T>>(this, "lifecycle", childNode, nodeList.get(0));
        }
        // Otherwise create a fresh one.
        return createLifecycle();
    }
}
public class GroovyScript2RestLoaderPlugin { /** * Get working repository name . Returns the repository name from configuration * if it previously configured and returns the name of current repository in other case . * @ return String * repository name */ public String getRepository ( ) { } }
if ( repository == null ) { try { return repositoryService . getCurrentRepository ( ) . getConfiguration ( ) . getName ( ) ; } catch ( RepositoryException e ) { throw new RuntimeException ( "Can not get current repository and repository name was not configured" , e ) ; } } else { return repository ; }
public class ImagePipeline {
    /**
     * Returns a DataSource supplier that will on get submit the request for execution and return a
     * DataSource representing the pending results of the task.
     *
     * @param imageRequest the request to submit (what to execute).
     * @param callerContext caller-supplied context object passed through to the fetch.
     * @return a DataSource representing pending results and completion of the request
     */
    public Supplier<DataSource<CloseableReference<PooledByteBuffer>>> getEncodedImageDataSourceSupplier(
            final ImageRequest imageRequest, final Object callerContext) {
        // Lazy: the fetch is only submitted when get() is invoked on the supplier.
        return new Supplier<DataSource<CloseableReference<PooledByteBuffer>>>() {
            @Override
            public DataSource<CloseableReference<PooledByteBuffer>> get() {
                return fetchEncodedImage(imageRequest, callerContext);
            }

            @Override
            public String toString() {
                // Include the source URI to make logging/debugging output useful.
                return Objects.toStringHelper(this).add("uri", imageRequest.getSourceUri()).toString();
            }
        };
    }
}
public class ElementsExceptionsFactory {
    /**
     * Constructs and initializes a new {@link DataAccessException} with the given {@link Throwable cause}
     * and {@link String message} formatted with the given {@link Object[] arguments}.
     *
     * @param cause {@link Throwable} identified as the reason this {@link DataAccessException} was thrown.
     * @param message {@link String} describing the {@link DataAccessException exception}.
     * @param args {@link Object[] arguments} used to replace format placeholders in the {@link String message}.
     * @return a new {@link DataAccessException} with the given {@link Throwable cause} and {@link String message}.
     * @see org.cp.elements.dao.DataAccessException
     */
    public static DataAccessException newDataAccessException(Throwable cause, String message, Object... args) {
        return new DataAccessException(format(message, args), cause);
    }
}
public class OgmTableGenerator {
    /**
     * NOTE: Copied from TableGenerator.
     * Determine the table name to use for the generator values.
     * Called during {@link #configure configuration}.
     *
     * @see #getTableName()
     * @param params The params supplied in the generator config (plus some standard useful extras).
     * @param dialect The dialect in effect
     * @return The table name to use.
     */
    protected QualifiedName determineGeneratorTableName(Properties params, Dialect dialect) {
        // Schema and catalog are not supported here; warn if the user configured them.
        String schemaName = params.getProperty(PersistentIdentifierGenerator.SCHEMA);
        if (schemaName != null) {
            log.schemaOptionNotSupportedForTableGenerator(schemaName);
        }
        String catalogName = params.getProperty(PersistentIdentifierGenerator.CATALOG);
        if (catalogName != null) {
            log.catalogOptionNotSupportedForTableGenerator(catalogName);
        }
        // Parse the configured table name (or the default) with no catalog/schema.
        return QualifiedNameParser.INSTANCE.parse(
                ConfigurationHelper.getString(TABLE_PARAM, params, DEF_TABLE), null, null);
    }
}
public class IfcRegularTimeSeriesImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the list of time-series values via the EMF reflective getter.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcTimeSeriesValue> getValues() {
        return (EList<IfcTimeSeriesValue>) eGet(Ifc4Package.Literals.IFC_REGULAR_TIME_SERIES__VALUES, true);
    }
}
public class InjectorImpl { /** * Creates a new bean instance for a given InjectionPoint , such as a * method or field . */ @ Override public < T > T instance ( InjectionPoint < T > ip ) { } }
Objects . requireNonNull ( ip ) ; Provider < T > provider = provider ( ip ) ; if ( provider != null ) { return provider . get ( ) ; } else { return null ; }
public class FP64 { /** * Returns the value of this fingerprint as an unsigned integer encoded * in base 16 ( hexideicmal ) , padded with leading zeros to a total length * of 16 characters . < p > * < b > Important : < / b > If the output of this function is subsequently * fingerprinted , the probabilistic guarantee is lost . That is , * there is a much higher liklihood of fingerprint collisions if * fingerprint values are themselves fingerprinted in any way . */ public String toHexString ( ) { } }
String res = Long . toHexString ( _fp ) ; int len = res . length ( ) ; if ( len < 16 ) { res = LEADING_ZEROS . substring ( len ) + res ; assert res . length ( ) == 16 ; } return res ;
public class Redwood {
    /**
     * Hide multiple channels. All other channels will be shown.
     *
     * @param channels The channels to hide
     */
    public static void hideOnlyChannels(Object... channels) {
        // Apply to every visibility handler: reset to show-all, then hide
        // exactly the requested channels.
        for (LogRecordHandler handler : handlers) {
            if (handler instanceof VisibilityHandler) {
                VisibilityHandler visHandler = (VisibilityHandler) handler;
                visHandler.showAll();
                for (Object channel : channels) {
                    visHandler.alsoHide(channel);
                }
            }
        }
    }
}
public class OStorageEmbedded {
    /**
     * Executes the command request and return the result back.
     *
     * @param iCommand the textual command request to parse and run
     * @return the command's result, as produced by {@code executeCommand}
     */
    public Object command(final OCommandRequestText iCommand) {
        // Look up the executor registered for this command type, wire up progress
        // reporting, parse the command text, then delegate execution.
        final OCommandExecutor executor = OCommandManager.instance().getExecutor(iCommand);
        executor.setProgressListener(iCommand.getProgressListener());
        executor.parse(iCommand);
        return executeCommand(iCommand, executor);
    }
}
public class MessageSetImpl {
    /**
     * Gets up to a given amount of messages in the given channel after a given message in any channel.
     *
     * @param channel The channel of the messages.
     * @param limit The limit of messages to get.
     * @param after Get messages after the message with this id.
     * @return The messages.
     * @see #getMessagesAfterAsStream(TextChannel, long)
     */
    public static CompletableFuture<MessageSet> getMessagesAfter(TextChannel channel, int limit, long after) {
        // -1 marks the "before" bound as unset; only the "after" bound applies.
        return getMessages(channel, limit, -1, after);
    }
}
public class QueuedKeyedResourcePool { /** * Destroys all resource requests in requestQueue . * @ param requestQueue The queue for which all resource requests are to be * destroyed . */ private void destroyRequestQueue ( Queue < AsyncResourceRequest < V > > requestQueue ) { } }
if ( requestQueue != null ) { AsyncResourceRequest < V > resourceRequest = requestQueue . poll ( ) ; while ( resourceRequest != null ) { destroyRequest ( resourceRequest ) ; resourceRequest = requestQueue . poll ( ) ; } }
public class SQLRebuilder {
    /**
     * Update the registry and deployment cache to reflect the latest state of
     * reality.
     *
     * @param obj the service deployment object whose relationships are read
     * @param c open JDBC connection used to persist the deployment entries
     * @throws SQLException if persisting a deployment fails
     */
    private synchronized void updateDeploymentMap(DigitalObject obj, Connection c) throws SQLException {
        // The object may deploy several service definitions for several content models;
        // register every (model, sDef) combination.
        Set<RelationshipTuple> sDefs = obj.getRelationships(Constants.MODEL.IS_DEPLOYMENT_OF, null);
        Set<RelationshipTuple> models = obj.getRelationships(Constants.MODEL.IS_CONTRACTOR_OF, null);
        for (RelationshipTuple sDefTuple : sDefs) {
            String sDef = sDefTuple.getObjectPID();
            for (RelationshipTuple cModelTuple : models) {
                String cModel = cModelTuple.getObjectPID();
                addDeployment(cModel, sDef, obj, c);
            }
        }
    }
}
public class JoynrUtil { /** * If targetLocation does not exist , it will be created . */ public static void copyDirectory ( File sourceLocation , File targetLocation ) throws IOException { } }
if ( sourceLocation . isDirectory ( ) ) { if ( ! targetLocation . exists ( ) ) { if ( targetLocation . mkdir ( ) == false ) { logger . debug ( "Creating target directory " + targetLocation + " failed." ) ; } } String [ ] children = sourceLocation . list ( ) ; if ( children == null ) { return ; } for ( int i = 0 ; i < children . length ; i ++ ) { copyDirectory ( new File ( sourceLocation , children [ i ] ) , new File ( targetLocation , children [ i ] ) ) ; } } else { FileInputStream in = null ; FileOutputStream out = null ; try { in = new FileInputStream ( sourceLocation ) ; out = new FileOutputStream ( targetLocation ) ; copyStream ( in , out ) ; } finally { if ( in != null ) { in . close ( ) ; } if ( out != null ) { out . close ( ) ; } } }
public class NamedResolverMap {
    /**
     * Return the integer value indicated by the given numeric key.
     *
     * @param key The key of the value to return.
     * @param dfl The default value to return, if the key is absent.
     * @return The integer value stored under the given key, or dfl.
     * @throws IllegalArgumentException if the value is present, but not an
     *         integer.
     */
    public int getIntegerOrDefault(int key, int dfl) {
        // Look up under the integer-keyed variant; fall back to the default wrapped
        // in the integer slot of the tri-variant value.
        Any3<Boolean, Integer, String> value = data.getOrDefault(
                Any2.<Integer, String>left(key),
                Any3.<Boolean, Integer, String>create2(dfl));
        // get2() is empty when the stored value is a boolean or string variant.
        return value.get2().orElseThrow(
                () -> new IllegalArgumentException("expected integer argument for param " + key));
    }
}
public class BitSet { /** * flips a bit and returns the resulting bit value . The index should be less than the * BitSet size . * @ param index the index to flip * @ return previous state of the index */ public boolean flipAndGet ( long index ) { } }
int wordNum = ( int ) ( index >> 6 ) ; // div 64 int bit = ( int ) index & 0x3f ; // mod 64 long bitmask = 1L << bit ; bits [ wordNum ] ^= bitmask ; return ( bits [ wordNum ] & bitmask ) != 0 ;
public class BosClient {
    /**
     * Fetches a source object to a new destination in Bos.
     *
     * @param bucketName The name of the bucket in which the new object will be created.
     * @param key The key in the destination bucket under which the new object will be created.
     * @param sourceUrl The url full path for fetching.
     * @return A FetchObjectResponse object containing the information returned by Bos for the newly fetching.
     */
    public FetchObjectResponse fetchObject(String bucketName, String key, String sourceUrl) {
        // Convenience overload: wrap the arguments in a request object and delegate.
        FetchObjectRequest request = new FetchObjectRequest(bucketName, key, sourceUrl);
        return this.fetchObject(request);
    }
}
public class BiInt2ObjectMap {
    /**
     * Compact the backing arrays by rehashing with a capacity just larger than current size
     * and giving consideration to the load factor.
     */
    public void compact() {
        // Ideal capacity keeps the table below the load factor at the current size;
        // round up to the next power of two, never below the minimum capacity.
        final int idealCapacity = (int) Math.round(size() * (1.0 / loadFactor));
        rehash(findNextPositivePowerOfTwo(Math.max(MIN_CAPACITY, idealCapacity)));
    }
}
public class S3DataSegmentPuller {
    /**
     * Returns the "version" (aka last modified timestamp) of the URI.
     *
     * @param uri The URI to check the last timestamp
     * @return The time in ms of the last modification of the URI in String format
     * @throws IOException if the timestamp cannot be fetched and the failure is recoverable
     */
    @Override
    public String getVersion(URI uri) throws IOException {
        try {
            final S3Coords coords = new S3Coords(checkURI(uri));
            final S3ObjectSummary objectSummary =
                    S3Utils.getSingleObjectSummary(s3Client, coords.bucket, coords.path);
            return StringUtils.format("%d", objectSummary.getLastModified().getTime());
        } catch (AmazonServiceException e) {
            if (S3Utils.isServiceExceptionRecoverable(e)) {
                // The recoverable logic is always true for IOException, so we want to only
                // pass IOException if it is recoverable — callers may retry on IOE.
                throw new IOE(e, "Could not fetch last modified timestamp from URI [%s]", uri);
            } else {
                // Non-recoverable failures surface as runtime exceptions.
                throw new RE(e, "Error fetching last modified timestamp from URI [%s]", uri);
            }
        }
    }
}
public class ServiceClientImpl {
    /**
     * Creates a new document translation request via the REST API.
     *
     * @see com.ibm.g11n.pipeline.client.ServiceClient#createDocumentTranslationRequest(com.ibm.g11n.pipeline.client.NewDocumentTranslationRequestData)
     */
    @Override
    public DocumentTranslationRequestData createDocumentTranslationRequest(
            NewDocumentTranslationRequestData newTranslationRequestData) throws ServiceException {
        if (newTranslationRequestData == null) {
            throw new IllegalArgumentException("Non-empty newTranslationRequestData must be specified.");
        }
        // Convert the public request data to the REST wire representation and serialize it.
        RestInputDocumentTranslationRequestData newRestTRData =
                new RestInputDocumentTranslationRequestData(newTranslationRequestData);
        Gson gson = createGson(RestInputDocumentTranslationRequestData.class.getName());
        String jsonBody = gson.toJson(newRestTRData, RestInputDocumentTranslationRequestData.class);
        // POST to the instance-scoped "new translation request" endpoint.
        DocumentTranslationRequestResponse resp = invokeApiJson("POST",
                escapePathSegment(account.getInstanceId()) + "/v2/doc-trs/new",
                jsonBody, DocumentTranslationRequestResponse.class);
        if (resp.getStatus() == Status.ERROR) {
            throw new ServiceException(resp.getMessage());
        }
        return new DocumentTranslationRequestDataImpl(resp.id, resp.translationRequest);
    }
}
public class CacheableSessionLockManager {
    /**
     * {@inheritDoc}
     *
     * Checks whether this session holds the persisted lock covering the given node
     * (the node's own lock or the closest locked ancestor).
     */
    protected boolean isPersistedLockHolder(NodeData node) throws RepositoryException {
        LockData lData = lockManager.getExactNodeOrCloseParentLock(node);
        // No applicable lock means this session cannot be the holder.
        return lData != null && isLockHolder(lData);
    }
}
public class CacheImpl {
    /**
     * {@inheritDoc}
     *
     * Offers an object to the cache under the given name. The object is accepted
     * only when the cache is below capacity and no live entry already exists for
     * the name (soft-referenced entries may have been garbage collected).
     *
     * @param name key under which to cache the object
     * @param obj the object to cache; wrapped in a CacheableImpl when it does not
     *        already implement ICacheable
     * @return true if the object was added to the cache
     */
    @Override
    public boolean offer(String name, Object obj) {
        boolean accepted = false;
        // check map size
        if (CACHE.size() < capacity) {
            SoftReference<?> tmp = CACHE.get(name);
            // Because soft references can be garbage collected when the system is
            // in need of memory, check that any existing entry is still live;
            // a cleared reference is treated the same as an absent one.
            if (null == tmp || null == tmp.get()) {
                ICacheable cacheable = null;
                if (obj instanceof ICacheable) {
                    cacheable = (ICacheable) obj;
                } else {
                    cacheable = new CacheableImpl(obj);
                }
                // set the object's name
                cacheable.setName(name);
                // set a registry entry
                registry.put(name, 1);
                // create a soft reference so the entry can be reclaimed under memory pressure
                SoftReference<ICacheable> value = new SoftReference<ICacheable>(cacheable);
                CACHE.put(name, value);
                // set acceptance
                accepted = true;
                log.info("{} has been added to the cache. Current size: {}", name, CACHE.size());
            }
        } else {
            log.warn("Cache has reached max element size: " + capacity);
        }
        return accepted;
    }
}
public class TxRecoveryAgentImpl { /** * This method retrieves a system property named com . ibm . tx . jta . impl . PeerLeaseCheckInterval * which allows a value to be specified for the time we should wait between peer server status checks . * @ return */ private int getPeerLeaseCheckInterval ( ) { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "getPeerLeaseCheckInterval" ) ; int intToReturn ; Integer peerLeaseCheckInterval = null ; try { peerLeaseCheckInterval = AccessController . doPrivileged ( new PrivilegedExceptionAction < Integer > ( ) { @ Override public Integer run ( ) { return Integer . getInteger ( "com.ibm.tx.jta.impl.PeerLeaseCheckInterval" , 20 ) ; // Default is 20 seconds } } ) ; } catch ( PrivilegedActionException e ) { if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Exception setting Peer Lease-Check Interval" , e ) ; peerLeaseCheckInterval = null ; } if ( peerLeaseCheckInterval == null ) peerLeaseCheckInterval = new Integer ( 20 ) ; intToReturn = peerLeaseCheckInterval . intValue ( ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "getPeerLeaseCheckInterval" , intToReturn ) ; return intToReturn ;
public class ContainerDefinition {
    /**
     * The mount points for data volumes in your container.
     * This parameter maps to <code>Volumes</code> in the
     * <a href="https://docs.docker.com/engine/api/v1.35/#operation/ContainerCreate">Create a container</a>
     * section of the <a href="https://docs.docker.com/engine/api/v1.35/">Docker Remote API</a> and the
     * <code>--volume</code> option to
     * <a href="https://docs.docker.com/engine/reference/run/">docker run</a>.
     * Windows containers can mount whole directories on the same drive as <code>$env:ProgramData</code>.
     * Windows containers cannot mount directories on a different drive, and mount point cannot be
     * across drives.
     *
     * @return The mount points for data volumes in your container; never null — an empty
     *         internal list is lazily created on first access.
     */
    public java.util.List<MountPoint> getMountPoints() {
        // Lazily initialize so callers never see a null collection.
        if (mountPoints == null) {
            mountPoints = new com.amazonaws.internal.SdkInternalList<MountPoint>();
        }
        return mountPoints;
    }
}
public class SubordinateControlInterceptor {
    /**
     * Before servicing the request, apply the registered subordinate controls to make sure any
     * state changes that have occurred on the client are applied.
     *
     * @param request the request being serviced
     */
    @Override
    public void serviceRequest(final Request request) {
        // Only apply for POST
        if ("POST".equals(request.getMethod())) {
            // Apply Controls (Use values on request)
            SubordinateControlHelper.applyRegisteredControls(request, true);
        }
        // Service Request
        super.serviceRequest(request);
    }
}
public class RESTResponseHelper {
    /**
     * This method builds the overview for the resources and collection we offer
     * in our implementation.
     *
     * @param pDatabase path to the storage
     * @param pStorageFac factory for creating backends
     * @param pRevision revision algorithm utilized
     * @return The streaming output for the HTTP response body.
     */
    public static StreamingOutput buildResponseOfDomLR(final IStorage pDatabase,
            final IBackendFactory pStorageFac, final IRevisioning pRevision) {
        // The DOM document is built lazily, only when the container writes the response.
        final StreamingOutput sOutput = new StreamingOutput() {
            @Override
            public void write(final OutputStream output) throws IOException, WebApplicationException {
                Document document;
                try {
                    document = createSurroundingXMLResp();
                    final Element resElement = RESTResponseHelper.createResultElement(document);
                    // One child element per collection/database known to the storage.
                    List<Element> collections;
                    try {
                        collections = RESTResponseHelper.createCollectionElementDBs(
                                pDatabase, document, pStorageFac, pRevision);
                    } catch (final TTException exce) {
                        throw new WebApplicationException(exce);
                    }
                    for (final Element resource : collections) {
                        resElement.appendChild(resource);
                    }
                    document.appendChild(resElement);
                    // Serialize the DOM straight into the response stream via an identity transform.
                    final DOMSource domSource = new DOMSource(document);
                    final StreamResult streamResult = new StreamResult(output);
                    final Transformer transformer = TransformerFactory.newInstance().newTransformer();
                    transformer.transform(domSource, streamResult);
                } catch (final ParserConfigurationException exce) {
                    throw new WebApplicationException(exce);
                } catch (final TransformerConfigurationException exce) {
                    throw new WebApplicationException(exce);
                } catch (final TransformerFactoryConfigurationError exce) {
                    throw new WebApplicationException(exce);
                } catch (final TransformerException exce) {
                    throw new WebApplicationException(exce);
                }
            }
        };
        return sOutput;
    }
}
public class CollectionValuedMap {
    /**
     * For testing only: exercises DeltaCollectionValuedMap by mirroring random
     * adds/removes into a plain copy and comparing the results, while checking
     * that the original map is left untouched.
     *
     * @param args from command line (unused)
     */
    public static void main(String[] args) {
        CollectionValuedMap<Integer, Integer> originalMap = new CollectionValuedMap<Integer, Integer>();
        // Seed the original map with random pairs.
        Random r = new Random();
        for (int i = 0; i < 800; i++) {
            Integer rInt1 = Integer.valueOf(r.nextInt(400));
            Integer rInt2 = Integer.valueOf(r.nextInt(400));
            originalMap.add(rInt1, rInt2);
            System.out.println("Adding " + rInt1 + ' ' + rInt2);
        }
        // originalCopyMap: snapshot to later verify the original was not mutated.
        CollectionValuedMap<Integer, Integer> originalCopyMap = new CollectionValuedMap<Integer, Integer>(originalMap);
        // deltaCopyMap: plain copy that receives the same mutations as deltaMap.
        CollectionValuedMap<Integer, Integer> deltaCopyMap = new CollectionValuedMap<Integer, Integer>(originalMap);
        // deltaMap: the delta view under test.
        CollectionValuedMap<Integer, Integer> deltaMap = new DeltaCollectionValuedMap<Integer, Integer>(originalMap);
        // now make a lot of changes to deltaMap;
        // add and change some stuff (values offset by 1000 so they are new)
        for (int i = 0; i < 400; i++) {
            Integer rInt1 = Integer.valueOf(r.nextInt(400));
            Integer rInt2 = Integer.valueOf(r.nextInt(400) + 1000);
            deltaMap.add(rInt1, rInt2);
            deltaCopyMap.add(rInt1, rInt2);
            System.out.println("Adding " + rInt1 + ' ' + rInt2);
        }
        // remove some stuff
        for (int i = 0; i < 400; i++) {
            Integer rInt1 = Integer.valueOf(r.nextInt(1400));
            Integer rInt2 = Integer.valueOf(r.nextInt(1400));
            deltaMap.removeMapping(rInt1, rInt2);
            deltaCopyMap.removeMapping(rInt1, rInt2);
            System.out.println("Removing " + rInt1 + ' ' + rInt2);
        }
        System.out.println("original: " + originalMap);
        System.out.println("copy: " + deltaCopyMap);
        System.out.println("delta: " + deltaMap);
        // Both checks should print true: the backing map is unchanged, and the
        // delta view matches the independently mutated copy.
        System.out.println("Original preserved? " + originalCopyMap.equals(originalMap));
        System.out.println("Delta accurate? " + deltaMap.equals(deltaCopyMap));
    }
}
public class OAuthProviderProcessingFilter {
    /**
     * Validate the signature of the request given the authentication request.
     *
     * @param authentication The authentication request.
     * @throws AuthenticationException if the signature does not verify
     */
    protected void validateSignature(ConsumerAuthentication authentication) throws AuthenticationException {
        SignatureSecret secret = authentication.getConsumerDetails().getSignatureSecret();
        // Resolve the token (if any) so its secret can participate in verification.
        String token = authentication.getConsumerCredentials().getToken();
        OAuthProviderToken authToken = null;
        if (token != null && !"".equals(token)) {
            authToken = getTokenServices().getToken(token);
        }
        // Build the signature method from the declared method name and secrets.
        String signatureMethod = authentication.getConsumerCredentials().getSignatureMethod();
        OAuthSignatureMethod method;
        try {
            method = getSignatureMethodFactory().getSignatureMethod(
                    signatureMethod, secret, authToken != null ? authToken.getSecret() : null);
        } catch (UnsupportedSignatureMethodException e) {
            throw new OAuthException(e.getMessage(), e);
        }
        String signatureBaseString = authentication.getConsumerCredentials().getSignatureBaseString();
        String signature = authentication.getConsumerCredentials().getSignature();
        if (log.isDebugEnabled()) {
            log.debug("Verifying signature " + signature + " for signature base string "
                    + signatureBaseString + " with method " + method.getName() + ".");
        }
        // verify() throws on mismatch; a normal return means the signature is valid.
        method.verify(signatureBaseString, signature);
    }
}
public class AutoValueOrOneOfProcessor {
    /**
     * Checks that, if the given {@code @AutoValue} or {@code @AutoOneOf} class is nested, it is
     * static and not private. This check is not necessary for correctness, since the generated code
     * would not compile if the check fails, but it produces better error messages for the user.
     *
     * @param type the annotated class being validated
     */
    final void checkModifiersIfNested(TypeElement type) {
        ElementKind enclosingKind = type.getEnclosingElement().getKind();
        // Only classes nested inside another class/interface need these checks.
        if (enclosingKind.isClass() || enclosingKind.isInterface()) {
            if (type.getModifiers().contains(Modifier.PRIVATE)) {
                errorReporter.abortWithError("@" + simpleAnnotationName + " class must not be private", type);
            } else if (Visibility.effectiveVisibilityOfElement(type).equals(Visibility.PRIVATE)) {
                // The previous case, where the class itself is private, is much commoner so it deserves
                // its own error message, even though it would be caught by the test here too.
                errorReporter.abortWithError(
                        "@" + simpleAnnotationName + " class must not be nested in a private class", type);
            }
            if (!type.getModifiers().contains(Modifier.STATIC)) {
                errorReporter.abortWithError("Nested @" + simpleAnnotationName + " class must be static", type);
            }
        }
        // In principle type.getEnclosingElement() could be an ExecutableElement (for a class
        // declared inside a method), but since RoundEnvironment.getElementsAnnotatedWith doesn't
        // return such classes we won't see them here.
    }
}
public class RtmpClient {
    /**
     * Send a remote procedure call.
     *
     * @param endpoint The endpoint of the call
     * @param service The service handling the call
     * @param method The method to call
     * @param args Optional args to the call
     * @return The callback getting called once the rpc returns a result
     */
    public InvokeCallback sendRpcWithEndpoint(String endpoint, String service, String method, Object... args) {
        // Wrap the call in a remoting message, build the AMF3 invoke envelope,
        // register a callback keyed by the invoke id, then send.
        RemotingMessage message = createRemotingMessage(endpoint, service, method, args);
        Invoke invoke = createAmf3InvokeSkeleton(null, message);
        InvokeCallback callback = getInvokeCallback(invoke.getInvokeId());
        send(invoke);
        return callback;
    }
}
public class ChargingStationEventListener { /** * Makes a charging station reservable or not reservable . * @ param chargingStationId the charging station to make reservable or not reservable . * @ param reservable true if reservable , false if not . */ private void setReservable ( ChargingStationId chargingStationId , boolean reservable ) { } }
ChargingStation chargingStation = repository . findOne ( chargingStationId . getId ( ) ) ; if ( chargingStation != null ) { chargingStation . setReservable ( reservable ) ; repository . createOrUpdate ( chargingStation ) ; }
public class TimeShift {
    /**
     * In-place modification of a TsdbResult array, increasing timestamps by the timeshift amount.
     *
     * @param data_query the originating query (unused here)
     * @param results    the sub-query results; the first entry is shifted
     * @param params     the function parameters; the first entry is the quoted shift amount, e.g. '10min'
     * @return the shifted data points, or an empty array if there are no results
     * @throws IllegalArgumentException if no shift parameter is supplied
     * @throws RuntimeException         if the parameter is not quoted or does not parse to a positive shift
     */
    @Override
    public DataPoints[] evaluate(TSQuery data_query, List<DataPoints[]> results, List<String> params) {
        // Nothing to shift if there is no data at all.
        if (results == null || results.isEmpty()) {
            return new DataPoints[]{};
        }
        if (params == null || params.isEmpty()) {
            throw new IllegalArgumentException("Need amount of timeshift to perform timeshift");
        }
        String param = params.get(0);
        if (param == null || param.length() == 0) {
            throw new IllegalArgumentException("Invalid timeshift='" + param + "'");
        }
        param = param.trim();
        // The shift amount must be supplied as a quoted string, e.g. '10min'.
        if (!(param.startsWith("'") && param.endsWith("'"))) {
            throw new RuntimeException("Invalid timeshift parameter: eg '10min'");
        }
        long timeshift = parseParam(param);
        if (timeshift <= 0) {
            throw new RuntimeException("timeshift <= 0");
        }
        return performShift(results.get(0), timeshift);
    }
}
public class ComputationGraph {
    /**
     * Do backprop (gradient calculation)
     *
     * @param clearLayers      if true, clear the vertex inputs and epsilons once backprop is complete
     * @param truncatedBPTT    false: normal backprop. true: calculate gradients using truncated BPTT for RNN layers
     * @param externalEpsilons null usually (for typical supervised learning). If not null (and length > 0) then assume that
     *                         the user has provided some errors externally, as they would do for example in reinforcement
     *                         learning situations.
     */
    protected void calcBackpropGradients(boolean clearLayers, boolean truncatedBPTT, INDArray... externalEpsilons) {
        // Gradients are written into a single flattened view array; make sure it exists first.
        if (flattenedGradients == null) {
            initGradientsView();
        }
        /*
         Design for workspaces use in backprop for ComputationGraph is similar to MultiLayerNetwork and shares some
         features with outputOfLayersDetached

         Specifically:
         1. We assume forward pass has already been done, and hence layer input fields are set (with all
            arrays/activations in workspace WS_ALL_LAYERS_ACT if appropriate)
         2. We use a set of small workspaces to contain the activation gradients for a single layer
            These are opened once per layer, and are closed only once the corresponding activation gradients have
            been consumed by all layers
         */
        // NOTE(review): '&&' binds tighter than '||', so this condition reads as
        //   (externalEpsilons == null) || (externalEpsilons.length == 0 && trainingWorkspaceMode != NONE)
        // The assertion message suggests the intent may have been
        //   (externalEpsilons == null || externalEpsilons.length == 0) && trainingWorkspaceMode != NONE
        // -- as written, the assertion also fires when externalEpsilons is null in workspace mode NONE. Confirm
        // before changing.
        if (externalEpsilons == null || externalEpsilons.length == 0
                        && configuration.getTrainingWorkspaceMode() != WorkspaceMode.NONE) {
            WorkspaceUtils.assertOpenAndActive(WS_ALL_LAYERS_ACT,
                            "Expected workspace WS_ALL_LAYERS_ACT to be active and open"
                                            + " in calcBackpropGradients when workspace mode is not set to NONE");
        }

        // Validate the network configuration for external errors - no output layers
        if (externalEpsilons != null && externalEpsilons.length > 0) {
            List<String> outputLayers = configuration.getNetworkOutputs();
            for (String s : outputLayers) {
                GraphVertex gv = getVertex(s);
                if (gv instanceof LayerVertex && ((LayerVertex) gv).getLayer() instanceof IOutputLayer) {
                    throw new IllegalStateException(
                                    "Cannot perform backprop with external errors in conjunction with an output layer:"
                                                    + " output layers cannot use external errors for backprop. Layer name: "
                                                    + s);
                }
            }
        }

        // Position in array: index of vertex. Value at position: the step (in topological order) that the activation
        // gradients of the specified vertex have been consumed by
        // Put another way: this is the step that it's safe to deallocate the layer's activation gradients by closing the
        // corresponding workspace
        // TODO we can probably cache this...
        int[] vertexActGradsFullyConsumedByStep = new int[topologicalOrder.length];
        for (GraphVertex gv : vertices) {
            int idx = gv.getVertexIndex();
            int minStepOfInputFrom = Integer.MAX_VALUE;
            VertexIndices[] inputsFrom = gv.getInputVertices();
            if (inputsFrom != null) {
                // inputsFrom may be null for input vertex
                for (VertexIndices vi : inputsFrom) {
                    int posInTopoSort = ArrayUtils.indexOf(topologicalOrder, vi.getVertexIndex());
                    if (posInTopoSort == -1) {
                        throw new IllegalStateException("Did not find vertex " + vi.getVertexIndex()
                                        + " in topological sort array");
                    }
                    minStepOfInputFrom = Math.min(minStepOfInputFrom, posInTopoSort);
                }
            }

            if (minStepOfInputFrom == Integer.MAX_VALUE) {
                // Input vertex, etc: its activation gradients are never consumed downstream.
                vertexActGradsFullyConsumedByStep[idx] = 0;
            } else {
                vertexActGradsFullyConsumedByStep[idx] = minStepOfInputFrom;
            }
        }

        // NOTE(review): this uses the *inference* workspace mode while the assertion above checks the
        // *training* workspace mode -- verify that asymmetry is intentional.
        boolean noWS = configuration.getInferenceWorkspaceMode() == WorkspaceMode.NONE;
        LayerWorkspaceMgr allNone = noWS ? LayerWorkspaceMgr.noWorkspaces(helperWorkspaces) : null;

        List<LayerWorkspaceMgr> allWorkspaceManagers = new ArrayList<>();
        List<LayerWorkspaceMgr> freeWorkspaceManagers = new ArrayList<>(); // Basically used as a stack
        Map<MemoryWorkspace, LayerWorkspaceMgr> openActivationsWorkspaces = new IdentityHashMap<>();
        List<MemoryWorkspace>[] closeAtEndIteraton = (List<MemoryWorkspace>[]) new List[topologicalOrder.length];

        // Do backprop, in reverse topological order
        LinkedList<Triple<String, INDArray, Character>> gradients = new LinkedList<>();
        // If true: already set epsilon for this vertex; later epsilons should be *added* to the existing one, not set
        boolean[] setVertexEpsilon = new boolean[topologicalOrder.length];
        MemoryWorkspace initialWorkspace = Nd4j.getMemoryManager().getCurrentWorkspace();
        try {
            for (int i = topologicalOrder.length - 1; i >= 0; i--) {
                boolean hitFrozen = false;
                GraphVertex current = vertices[topologicalOrder[i]];
                int vIdx = current.getVertexIndex();
                String vertexName = current.getVertexName();

                // FIXME: make the frozen vertex feature extraction more flexible
                if (current.hasLayer() && current.getLayer() instanceof FrozenLayer
                                || current instanceof FrozenVertex) {
                    hitFrozen = true;
                }

                if (current.isInputVertex() || hitFrozen) {
                    // Close any activation gradient workspaces that we no longer require
                    // Note that activation gradient workspaces can be closed only once the corresponding activations
                    // gradients have been fully consumed
                    if (closeAtEndIteraton[i] != null) {
                        for (MemoryWorkspace wsAct : closeAtEndIteraton[i]) {
                            wsAct.close();
                            LayerWorkspaceMgr canNowReuse = openActivationsWorkspaces.remove(wsAct);
                            freeWorkspaceManagers.add(canNowReuse);
                        }
                    }
                    closeAtEndIteraton[i] = null;
                    continue;
                }

                // First: determine what workspace manager we should use for the activation gradients from this vertex
                LayerWorkspaceMgr workspaceMgr;
                if (noWS) {
                    workspaceMgr = allNone;
                } else {
                    // First: is there a free activation gradient workspace we can use?
                    if (freeWorkspaceManagers.size() > 0) {
                        workspaceMgr = freeWorkspaceManagers.remove(freeWorkspaceManagers.size() - 1);
                    } else {
                        // No existing free workspace managers for forward pass - create a new one...
                        String wsName = "WS_LAYER_ACT_" + allWorkspaceManagers.size();
                        workspaceMgr = LayerWorkspaceMgr.builder()
                                        .with(ArrayType.INPUT, WS_ALL_LAYERS_ACT, WS_ALL_LAYERS_ACT_CONFIG)
                                        .with(ArrayType.ACTIVATION_GRAD, wsName, WS_LAYER_ACT_X_CONFIG)
                                        // For forward pass in the context of BP
                                        .with(ArrayType.ACTIVATIONS, WS_LAYER_WORKING_MEM, WS_LAYER_WORKING_MEM_CONFIG)
                                        .with(ArrayType.FF_WORKING_MEM, WS_LAYER_WORKING_MEM, WS_LAYER_WORKING_MEM_CONFIG)
                                        .with(ArrayType.BP_WORKING_MEM, WS_LAYER_WORKING_MEM, WS_LAYER_WORKING_MEM_CONFIG)
                                        .with(ArrayType.RNN_FF_LOOP_WORKING_MEM, WS_RNN_LOOP_WORKING_MEM, WS_RNN_LOOP_WORKING_MEM_CONFIG)
                                        .with(ArrayType.RNN_BP_LOOP_WORKING_MEM, WS_RNN_LOOP_WORKING_MEM, WS_RNN_LOOP_WORKING_MEM_CONFIG)
                                        .build();
                        allWorkspaceManagers.add(workspaceMgr);
                    }
                }
                workspaceMgr.setHelperWorkspacePointers(helperWorkspaces);

                if (current.isOutputVertex()) {
                    // Two reasons for a vertex to be an output vertex:
                    // (a) it's an output layer (i.e., instanceof IOutputLayer), or
                    // (b) it's a normal layer, but it has been marked as an output layer for use in external errors - for reinforcement learning, for example
                    int thisOutputNumber = configuration.getNetworkOutputs().indexOf(current.getVertexName());
                    Layer currentLayer = current.getLayer();
                    if (currentLayer instanceof FrozenLayerWithBackprop) {
                        // Unwrap to the real layer so the IOutputLayer check below sees through the frozen wrapper.
                        currentLayer = ((FrozenLayerWithBackprop) currentLayer).getInsideLayer();
                    }
                    if (currentLayer instanceof IOutputLayer) {
                        IOutputLayer outputLayer = (IOutputLayer) currentLayer;

                        INDArray currLabels = labels[thisOutputNumber];
                        outputLayer.setLabels(currLabels);
                    } else {
                        if ((externalEpsilons == null || externalEpsilons.length == 0)
                                        && labels[thisOutputNumber] != null) {
                            throw new DL4JException("Layer \"" + current.getVertexName() + "\" of type "
                                            + current.getLayer().getClass().getSimpleName()
                                            + " is set as network output "
                                            + "(but isn't an IOutputLayer). Only IOutputLayer layers can be fit via backprop with"
                                            + " a labels array. ");
                        }
                        current.setEpsilon(externalEpsilons[thisOutputNumber]);
                        setVertexEpsilon[topologicalOrder[i]] = true;
                    }
                }

                // Actually execute backprop for the specified vertex
                // First: Open the relevant workspace for the activations.
                // Note that this will be closed only once the current vertex's activations have been consumed
                MemoryWorkspace wsActivationGrads = workspaceMgr.notifyScopeEntered(ArrayType.ACTIVATION_GRAD);
                openActivationsWorkspaces.put(wsActivationGrads, workspaceMgr);

                // Note that because we're opening activation gradient workspaces not in any defined order (i.e., workspace
                // use isn't simply nested), we'll manually override the previous workspace setting. Otherwise, when we
                // close these workspaces, the "current" workspace may be set to the incorrect one
                wsActivationGrads.setPreviousWorkspace(initialWorkspace);

                int closeableAt = vertexActGradsFullyConsumedByStep[vIdx];
                if (closeableAt >= 0) {
                    if (closeAtEndIteraton[closeableAt] == null) {
                        closeAtEndIteraton[closeableAt] = new ArrayList<>();
                    }
                    closeAtEndIteraton[closeableAt].add(wsActivationGrads);
                }

                Pair<Gradient, INDArray[]> pair;
                INDArray[] epsilons;
                try (MemoryWorkspace wsWorkingMem = workspaceMgr.notifyScopeEntered(ArrayType.BP_WORKING_MEM)) {
                    pair = current.doBackward(truncatedBPTT, workspaceMgr);
                    epsilons = pair.getSecond();

                    // Validate workspace location for the activation gradients:
                    // validateArrayWorkspaces(LayerWorkspaceMgr mgr, INDArray array, ArrayType arrayType, String vertexName, boolean isInputVertex, String op) {
                    for (INDArray epsilon : epsilons) {
                        if (epsilon != null) {
                            // May be null for EmbeddingLayer, etc
                            validateArrayWorkspaces(workspaceMgr, epsilon, ArrayType.ACTIVATION_GRAD, vertexName,
                                            false, "Backprop");
                        }
                    }
                }

                // Inputs to the current GraphVertex:
                VertexIndices[] inputVertices = current.getInputVertices();

                // Set epsilons for the vertices that provide inputs to this vertex:
                if (inputVertices != null) {
                    int j = 0;
                    for (VertexIndices v : inputVertices) {
                        GraphVertex gv = vertices[v.getVertexIndex()];
                        if (setVertexEpsilon[gv.getVertexIndex()]) {
                            // This vertex: must output to multiple vertices... we want to add the epsilons here
                            INDArray currentEps = gv.getEpsilon();
                            gv.setEpsilon(currentEps.addi(epsilons[j++])); // TODO is this always safe?
                        } else {
                            gv.setEpsilon(epsilons[j++]);
                        }
                        setVertexEpsilon[gv.getVertexIndex()] = true;
                    }
                }

                if (pair.getFirst() != null) {
                    Gradient g = pair.getFirst();
                    Map<String, INDArray> map = g.gradientForVariable();
                    // Prefix each gradient variable with the vertex name, preserving the per-vertex order
                    // by building a temp list and prepending it to the overall (reverse-topological) list.
                    LinkedList<Triple<String, INDArray, Character>> tempList = new LinkedList<>();
                    for (Map.Entry<String, INDArray> entry : map.entrySet()) {
                        String origName = entry.getKey();
                        String newName = current.getVertexName() + "_" + origName;
                        tempList.addFirst(new Triple<>(newName, entry.getValue(),
                                        g.flatteningOrderForVariable(origName)));
                    }
                    for (Triple<String, INDArray, Character> t : tempList)
                        gradients.addFirst(t);
                }

                // Close any activation gradient workspaces that we no longer require
                // Note that activation gradient workspaces can be closed only once the corresponding activations
                // gradients have been fully consumed
                if (closeAtEndIteraton[i] != null) {
                    for (MemoryWorkspace wsAct : closeAtEndIteraton[i]) {
                        wsAct.close();
                        LayerWorkspaceMgr canNowReuse = openActivationsWorkspaces.remove(wsAct);
                        freeWorkspaceManagers.add(canNowReuse);
                    }
                    closeAtEndIteraton[i] = null;
                }
            }
        } finally {
            // Close all open workspaces... usually this list will be empty, but not if an exception is thrown
            for (MemoryWorkspace ws : openActivationsWorkspaces.keySet()) {
                ws.close();
            }
            Nd4j.getMemoryManager().setCurrentWorkspace(initialWorkspace);
        }

        // Now, add the gradients in the order we need them in for flattening (same as params order)
        Gradient gradient = new DefaultGradient(flattenedGradients);
        for (Triple<String, INDArray, Character> t : gradients) {
            gradient.setGradientFor(t.getFirst(), t.getSecond(), t.getThird());
        }

        this.gradient = gradient;

        if (truncatedBPTT && clearTbpttState) {
            rnnClearPreviousState();
        }

        // Clear inputs and epsilons:
        if (clearLayers) {
            for (GraphVertex gv : vertices) {
                gv.clear();
            }
        }
    }
}
public class DropinMonitor { /** * Call this method to stop all applications started by the dropin monitor ( for example if dropins is disabled * on a server startup but the applications it started last server run are still there ) . */ private void deleteAllConfiguredApplications ( ) { } }
try { Configuration [ ] configs = configAdmin . listConfigurations ( "(&(service.factoryPid=" + AppManagerConstants . APPLICATIONS_PID + ")(" + AppManagerConstants . AUTO_INSTALL_PROP + "=true))" ) ; if ( configs != null ) { for ( Configuration c : configs ) { try { c . delete ( ) ; } catch ( Exception e ) { // Move on , but FFDC } } } } catch ( IOException e ) { // Hmm , I guess this is a bad place to be , not sure what to do . I guess I just FFDC and move on . // A clean start will fix this issue . } catch ( InvalidSyntaxException e ) { // This should never happen , if it does we want the FFDC }
public class CommerceCountryPersistenceImpl { /** * Returns the commerce country where groupId = & # 63 ; and twoLettersISOCode = & # 63 ; or throws a { @ link NoSuchCountryException } if it could not be found . * @ param groupId the group ID * @ param twoLettersISOCode the two letters iso code * @ return the matching commerce country * @ throws NoSuchCountryException if a matching commerce country could not be found */ @ Override public CommerceCountry findByG_Tw ( long groupId , String twoLettersISOCode ) throws NoSuchCountryException { } }
CommerceCountry commerceCountry = fetchByG_Tw ( groupId , twoLettersISOCode ) ; if ( commerceCountry == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", twoLettersISOCode=" ) ; msg . append ( twoLettersISOCode ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCountryException ( msg . toString ( ) ) ; } return commerceCountry ;
public class ParsedColInfo { /** * Helpers */ public ParsedColInfo updateTableName ( String tblName , String tblAlias ) { } }
m_tableName = tblName ; m_tableAlias = tblAlias ; return this ;
public class AtomPlacer3D { /** * Gets the distanceValue attribute of the parameter set . * @ param id1 atom1 id * @ param id2 atom2 id * @ return The distanceValue value from the force field parameter set */ public double getBondLengthValue ( String id1 , String id2 ) { } }
String dkey = "" ; if ( pSet . containsKey ( ( "bond" + id1 + ";" + id2 ) ) ) { dkey = "bond" + id1 + ";" + id2 ; } else if ( pSet . containsKey ( ( "bond" + id2 + ";" + id1 ) ) ) { dkey = "bond" + id2 + ";" + id1 ; } else { logger . warn ( "KEYError: Unknown distance key in pSet: " + id2 + ";" + id1 + " take default bond length: " + DEFAULT_BOND_LENGTH ) ; return DEFAULT_BOND_LENGTH ; } return ( ( Double ) ( pSet . get ( dkey ) . get ( 0 ) ) ) . doubleValue ( ) ;
public class Formation { /** * Updates the assignment of members to slots */ public void updateSlotAssignments ( ) { } }
// Apply the strategy to update slot assignments slotAssignmentStrategy . updateSlotAssignments ( slotAssignments ) ; // Set the newly calculated number of slots pattern . setNumberOfSlots ( slotAssignmentStrategy . calculateNumberOfSlots ( slotAssignments ) ) ; // Update the drift offset if a motion moderator is set if ( motionModerator != null ) motionModerator . calculateDriftOffset ( driftOffset , slotAssignments , pattern ) ;
public class Position { /** * calculates the distance of a point to the great circle path between p1 * and p2. * Formula from : http : / / www . movable - type . co . uk / scripts / latlong . html * @ param p1 * @ param p2 * @ return */ public final double getDistanceKmToPath ( Position p1 , Position p2 ) { } }
double d = radiusEarthKm * asin ( sin ( getDistanceToKm ( p1 ) / radiusEarthKm ) * sin ( toRadians ( getBearingDegrees ( p1 ) - p1 . getBearingDegrees ( p2 ) ) ) ) ; return abs ( d ) ;
public class RealexHpp { /** * Method produces JSON from < code > HppRequest < / code > object . * Carries out the following actions : * < ul > * < li > Validates inputs < / li > * < li > Generates defaults for security hash , order ID and time stamp ( if required ) < / li > * < li > Optional to Base64 encode inputs < / li > * < li > Serialises request object to JSON < / li > * < / ul > * @ param hppRequest * @ param encoded < code > true < / code > if the JSON values should be encoded . * @ return String */ public String requestToJson ( HppRequest hppRequest , boolean encoded ) { } }
LOGGER . info ( "Converting HppRequest to JSON." ) ; String json = null ; // generate defaults LOGGER . debug ( "Generating defaults." ) ; hppRequest . generateDefaults ( secret ) ; // validate request LOGGER . debug ( "Validating request." ) ; ValidationUtils . validate ( hppRequest ) ; // encode LOGGER . debug ( "Encoding object." ) ; try { if ( encoded ) { hppRequest = hppRequest . encode ( ENCODING_CHARSET ) ; } } catch ( UnsupportedEncodingException ex ) { LOGGER . error ( "Exception encoding HPP request." , ex ) ; throw new RealexException ( "Exception encoding HPP request." , ex ) ; } // convert to JSON LOGGER . debug ( "Converting to JSON." ) ; json = JsonUtils . toJson ( hppRequest ) ; return json ;
public class HttpURI { public URI toURI ( ) throws URISyntaxException { } }
return new URI ( _scheme , null , _host , _port , _path , _query == null ? null : UrlEncoded . decodeString ( _query ) , _fragment ) ;
public class AnnotationUtil { /** * Returns all annotations searching from the specified { @ code element } . The search range depends on * the specified { @ link FindOption } s and the specified { @ code collectingFilter } decides whether * an annotation is collected or not . * @ param element the { @ link AnnotatedElement } to find annotations * @ param findOptions the options to be applied when retrieving annotations * @ param collectingFilter the predicate which decides whether the annotation is to be collected or not */ static List < Annotation > getAnnotations ( AnnotatedElement element , EnumSet < FindOption > findOptions , Predicate < Annotation > collectingFilter ) { } }
requireNonNull ( element , "element" ) ; requireNonNull ( collectingFilter , "collectingFilter" ) ; final Builder < Annotation > builder = new Builder < > ( ) ; for ( final AnnotatedElement e : resolveTargetElements ( element , findOptions ) ) { for ( final Annotation annotation : e . getDeclaredAnnotations ( ) ) { if ( findOptions . contains ( FindOption . LOOKUP_META_ANNOTATIONS ) ) { getMetaAnnotations ( builder , annotation , collectingFilter ) ; } if ( collectingFilter . test ( annotation ) ) { builder . add ( annotation ) ; } } } return builder . build ( ) ;
public class PairtreeFactory { /** * Creates a Pairtree , with the supplied prefix , using the supplied S3 bucket and internal bucket path . * @ param aPrefix A Pairtree prefix * @ param aBucket An S3 bucket * @ param aBucketPath A path in the S3 bucket to the Pairtree root * @ param aAccessKey An AWS access key * @ param aSecretKey An AWS secret key * @ return A Pairtree */ public Pairtree getPrefixedPairtree ( final String aPrefix , final String aBucket , final String aBucketPath , final String aAccessKey , final String aSecretKey ) { } }
return new S3Pairtree ( aPrefix , myVertx , aBucket , aBucketPath , aAccessKey , aSecretKey ) ;
public class TurnWeighting { /** * This method calculates the turn weight separately . */ public double calcTurnWeight ( int edgeFrom , int nodeVia , int edgeTo ) { } }
long turnFlags = turnCostExt . getTurnCostFlags ( edgeFrom , nodeVia , edgeTo ) ; if ( turnCostEncoder . isTurnRestricted ( turnFlags ) ) return Double . POSITIVE_INFINITY ; return turnCostEncoder . getTurnCost ( turnFlags ) ;
public class CloudTasksClient { /** * Forces a task to run now . * < p > When this method is called , Cloud Tasks will dispatch the task , even if the task is already * running , the queue has reached its [ RateLimits ] [ google . cloud . tasks . v2 . RateLimits ] or is * [ PAUSED ] [ google . cloud . tasks . v2 . Queue . State . PAUSED ] . * < p > This command is meant to be used for manual debugging . For example , * [ RunTask ] [ google . cloud . tasks . v2 . CloudTasks . RunTask ] can be used to retry a failed task after a * fix has been made or to manually force a task to be dispatched now . * < p > The dispatched task is returned . That is , the task that is returned contains the * [ status ] [ Task . status ] after the task is dispatched but before the task is received by its * target . * < p > If Cloud Tasks receives a successful response from the task ' s target , then the task will be * deleted ; otherwise the task ' s [ schedule _ time ] [ google . cloud . tasks . v2 . Task . schedule _ time ] will be * reset to the time that [ RunTask ] [ google . cloud . tasks . v2 . CloudTasks . RunTask ] was called plus the * retry delay specified in the queue ' s [ RetryConfig ] [ google . cloud . tasks . v2 . RetryConfig ] . * < p > [ RunTask ] [ google . cloud . tasks . v2 . CloudTasks . RunTask ] returns * [ NOT _ FOUND ] [ google . rpc . Code . NOT _ FOUND ] when it is called on a task that has already succeeded * or permanently failed . * < p > Sample code : * < pre > < code > * try ( CloudTasksClient cloudTasksClient = CloudTasksClient . create ( ) ) { * TaskName name = TaskName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ QUEUE ] " , " [ TASK ] " ) ; * Task response = cloudTasksClient . runTask ( name ) ; * < / code > < / pre > * @ param name Required . * < p > The task name . For example : * ` projects / PROJECT _ ID / locations / LOCATION _ ID / queues / QUEUE _ ID / tasks / TASK _ ID ` * @ throws com . google . api . gax . rpc . 
ApiException if the remote call fails */ public final Task runTask ( TaskName name ) { } }
RunTaskRequest request = RunTaskRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return runTask ( request ) ;
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcThermalAdmittanceMeasure() {
        // Lazily resolve the EClass from the globally registered Ifc4 package. The classifier
        // index (879) is the generated position of IfcThermalAdmittanceMeasure in the package's
        // classifier list and must stay in sync with the generated model.
        if (ifcThermalAdmittanceMeasureEClass == null) {
            ifcThermalAdmittanceMeasureEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(879);
        }
        return ifcThermalAdmittanceMeasureEClass;
    }
}
public class LanguageTranslator { /** * Identify language . * Identifies the language of the input text . * @ param identifyOptions the { @ link IdentifyOptions } containing the options for the call * @ return a { @ link ServiceCall } with a response type of { @ link IdentifiedLanguages } */ public ServiceCall < IdentifiedLanguages > identify ( IdentifyOptions identifyOptions ) { } }
Validator . notNull ( identifyOptions , "identifyOptions cannot be null" ) ; String [ ] pathSegments = { "v3/identify" } ; RequestBuilder builder = RequestBuilder . post ( RequestBuilder . constructHttpUrl ( getEndPoint ( ) , pathSegments ) ) ; builder . query ( "version" , versionDate ) ; Map < String , String > sdkHeaders = SdkCommon . getSdkHeaders ( "language_translator" , "v3" , "identify" ) ; for ( Entry < String , String > header : sdkHeaders . entrySet ( ) ) { builder . header ( header . getKey ( ) , header . getValue ( ) ) ; } builder . header ( "Accept" , "application/json" ) ; builder . bodyContent ( identifyOptions . text ( ) , "text/plain" ) ; return createServiceCall ( builder . build ( ) , ResponseConverterUtils . getObject ( IdentifiedLanguages . class ) ) ;
public class ProxyDetails {
    /**
     * Returns the lowest index at which any of the given values occurs in {@code text},
     * or -1 if none of them occur.
     *
     * @param text   the text to search
     * @param values the candidate substrings
     * @return the smallest matching index, or -1 when no value is found
     */
    protected int indexOf(String text, String... values) {
        int lowest = -1;
        for (String candidate : values) {
            int idx = text.indexOf(candidate);
            if (idx < 0) {
                continue; // this candidate does not occur at all
            }
            if (lowest < 0 || idx < lowest) {
                lowest = idx;
            }
        }
        return lowest;
    }
}
public class AbstractFuture { /** * Returns a value , suitable for storing in the { @ link # value } field . From the given future , * which is assumed to be done . * < p > This is approximately the inverse of { @ link # getDoneValue ( Object ) } */ private static Object getFutureValue ( ListenableFuture < ? > future ) { } }
Object valueToSet ; if ( future instanceof TrustedFuture ) { // Break encapsulation for TrustedFuture instances since we know that subclasses cannot // override . get ( ) ( since it is final ) and therefore this is equivalent to calling . get ( ) // and unpacking the exceptions like we do below ( just much faster because it is a single // field read instead of a read , several branches and possibly creating exceptions ) . return ( ( AbstractFuture < ? > ) future ) . value ; } else { // Otherwise calculate valueToSet by calling . get ( ) try { Object v = getDone ( future ) ; valueToSet = v == null ? NULL : v ; } catch ( ExecutionException exception ) { valueToSet = new Failure ( exception . getCause ( ) ) ; } catch ( CancellationException cancellation ) { valueToSet = new Cancellation ( false , cancellation ) ; } catch ( Throwable t ) { valueToSet = new Failure ( t ) ; } } return valueToSet ;
public class TextWrapper { /** * Wrap the specified text . * @ param text the text to wrap . * @ return the wrapped text . */ public String wrap ( String text ) { } }
StringBuilder sb = new StringBuilder ( ) ; int continuationLength = continuation . length ( ) ; int currentPosition = 0 ; for ( String word : text . split ( " " ) ) { String lastWord ; int wordLength = word . length ( ) ; if ( currentPosition + wordLength <= width ) { if ( currentPosition != 0 ) { sb . append ( " " ) ; currentPosition += 1 ; } sb . append ( lastWord = word ) ; currentPosition += wordLength ; } else { if ( currentPosition > 0 ) { sb . append ( LINE_SEPARATOR ) ; currentPosition = 0 ; } if ( wordLength > width && strict ) { int i = 0 ; while ( i + width < wordLength ) { sb . append ( word . substring ( i , width - continuationLength ) ) . append ( continuation ) . append ( LINE_SEPARATOR ) ; i += width - continuationLength ; } String endOfWord = word . substring ( i ) ; sb . append ( lastWord = endOfWord ) ; currentPosition = endOfWord . length ( ) ; } else { sb . append ( lastWord = word ) ; currentPosition += wordLength ; } } int lastNewLine = lastWord . lastIndexOf ( "\n" ) ; if ( lastNewLine != - 1 ) { currentPosition = lastWord . length ( ) - lastNewLine ; } } return sb . toString ( ) ;
public class Model { /** * Saves the given context , by calling all the { @ link ContextDataFactory } ies . * @ param ctx the context to save . * @ throws IllegalArgumentException ( since TODO add version ) if the given context is { @ code null } . * @ since 2.0.0 * @ see ContextDataFactory # persistContextData ( Session , Context ) */ public void saveContext ( Context ctx ) { } }
validateContextNotNull ( ctx ) ; for ( ContextDataFactory cdf : this . contextDataFactories ) { cdf . persistContextData ( getSession ( ) , ctx ) ; }
public class AbstractIoAcceptor { /** * { @ inheritDoc } */ public final void bind ( SocketAddress firstLocalAddress , SocketAddress ... otherLocalAddresses ) throws IOException { } }
if ( firstLocalAddress == null ) { bind ( getDefaultLocalAddresses ( ) ) ; return ; } List < SocketAddress > localAddresses = new ArrayList < > ( 2 ) ; localAddresses . add ( firstLocalAddress ) ; if ( otherLocalAddresses != null ) { Collections . addAll ( localAddresses , otherLocalAddresses ) ; } bind ( localAddresses ) ;
public class GBSNode { /** * Find the insert point in the right half of the node for a new key . * NB : If the node is less than half full then the new key is to * become the highest key in the node . * @ param new1 New Object to be inserted * @ param point Found insertion point */ void findInsertPointInRight ( Object new1 , NodeInsertPoint point ) { } }
int r = rightMostIndex ( ) ; int m = midPoint ( ) ; if ( r <= m ) { String x = "Erroneous call to findInsertPointInRight(). " + "rightMostIndex() = " + r + ", midPoint() = " + m + "." ; throw new OptimisticDepthException ( x ) ; } findIndex ( m + 1 , r , new1 , point ) ;
public class PerspectiveOps { /** * Takes a list of { @ link AssociatedTriple } as input and breaks it up into three lists for each view . * @ param pairs Input : List of associated triples . * @ param view1 Output : List of observations from view 1 * @ param view2 Output : List of observations from view 2 * @ param view3 Output : List of observations from view 3 */ public static void splitAssociated ( List < AssociatedTriple > pairs , List < Point2D_F64 > view1 , List < Point2D_F64 > view2 , List < Point2D_F64 > view3 ) { } }
for ( AssociatedTriple p : pairs ) { view1 . add ( p . p1 ) ; view2 . add ( p . p2 ) ; view3 . add ( p . p3 ) ; }
public class ContainerClassLoader { /** * to set vars passed up to ClassLoader . definePackage . */ public Package definePackage ( String name , Manifest manifest , URL sealBase ) throws IllegalArgumentException { } }
Attributes mA = manifest . getMainAttributes ( ) ; String specTitle = mA . getValue ( Name . SPECIFICATION_TITLE ) ; String specVersion = mA . getValue ( Name . SPECIFICATION_VERSION ) ; String specVendor = mA . getValue ( Name . SPECIFICATION_VENDOR ) ; String implTitle = mA . getValue ( Name . IMPLEMENTATION_TITLE ) ; String implVersion = mA . getValue ( Name . IMPLEMENTATION_VERSION ) ; String implVendor = mA . getValue ( Name . IMPLEMENTATION_VENDOR ) ; String sealedString = mA . getValue ( Name . SEALED ) ; Boolean sealed = ( sealedString == null ? Boolean . FALSE : sealedString . equalsIgnoreCase ( "true" ) ) ; // now overwrite global attributes with the specific attributes String unixName = name . replaceAll ( "\\." , "/" ) + "/" ; // replace all dots with slash and add trailing slash mA = manifest . getAttributes ( unixName ) ; if ( mA != null ) { String s = mA . getValue ( Name . SPECIFICATION_TITLE ) ; if ( s != null ) specTitle = s ; s = mA . getValue ( Name . SPECIFICATION_VERSION ) ; if ( s != null ) specVersion = s ; s = mA . getValue ( Name . SPECIFICATION_VENDOR ) ; if ( s != null ) specVendor = s ; s = mA . getValue ( Name . IMPLEMENTATION_TITLE ) ; if ( s != null ) implTitle = s ; s = mA . getValue ( Name . IMPLEMENTATION_VERSION ) ; if ( s != null ) implVersion = s ; s = mA . getValue ( Name . IMPLEMENTATION_VENDOR ) ; if ( s != null ) implVendor = s ; s = mA . getValue ( Name . SEALED ) ; if ( s != null ) sealed = s . equalsIgnoreCase ( "true" ) ; } if ( ! sealed ) sealBase = null ; return definePackage ( name , specTitle , specVersion , specVendor , implTitle , implVersion , implVendor , sealBase ) ;
public class ResourceSnippet { /** * returns the line number of the given char in the text */ public static int getLineNumber ( String text , int posChar ) { } }
int len = Math . min ( posChar , text . length ( ) ) ; int result = 1 ; for ( int i = 0 ; i < len ; i ++ ) { if ( text . charAt ( i ) == '\n' ) result ++ ; } return result ;
public class EventsHelper { /** * Bind a function to the mousemove event of each matched element . * @ param jsScope * Scope to use * @ return the jQuery code */ public static ChainableStatement mousemove ( JsScope jsScope ) { } }
// Delegates to the generic chainable-statement builder: the "mousemove" event label plus the rendered scope body.
return new DefaultChainableStatement ( MouseEvent . MOUSEMOVE . getEventLabel ( ) , jsScope . render ( ) ) ;
public class PrcReplicationAccMethodSave { /** * < p > Process entity request . < / p > * @ param pAddParam additional param , e . g . return this line ' s * document in " nextEntity " for farther process * @ param pRequestData Request Data * @ param pEntity Entity to process * @ return Entity processed for farther process or null * @ throws Exception - an exception */ @ Override public final ReplicationAccMethod process ( final Map < String , Object > pAddParam , final ReplicationAccMethod pEntity , final IRequestData pRequestData ) throws Exception { } }
// Guard: replication must target a database different from the local one.
if ( pEntity . getRequestedDatabaseId ( ) == getSrvOrm ( ) . getIdDatabase ( ) ) { throw new ExceptionWithCode ( ExceptionWithCode . WRONG_PARAMETER , "requested_database_must_be_different" ) ; }
// Persist: insert brand-new entities, otherwise update the existing row.
if ( pEntity . getIsNew ( ) ) { this . srvOrm . insertEntity ( pAddParam , pEntity ) ; } else { this . srvOrm . updateEntity ( pAddParam , pEntity ) ; }
// Notify every registered listener that this accounting-method model changed.
for ( IHandlerModelChanged < ReplicationAccMethod > replAccMethChangedHandler : this . replAccMethChangedHandlers ) { replAccMethChangedHandler . handleModelChanged ( pEntity ) ; }
// Expose the (lazily loaded) accounting settings to the request for the view layer, then hand the entity onward.
pRequestData . setAttribute ( "accSettings" , this . srvAccSettings . lazyGetAccSettings ( pAddParam ) ) ; return pEntity ;
public class CommerceDiscountUsageEntryLocalServiceWrapper { /** * Creates a new commerce discount usage entry with the primary key . Does not add the commerce discount usage entry to the database . * @ param commerceDiscountUsageEntryId the primary key for the new commerce discount usage entry * @ return the new commerce discount usage entry */ @ Override public com . liferay . commerce . discount . model . CommerceDiscountUsageEntry createCommerceDiscountUsageEntry ( long commerceDiscountUsageEntryId ) { } }
// Pure delegation to the wrapped local service; per the javadoc the entity is only instantiated, not persisted.
return _commerceDiscountUsageEntryLocalService . createCommerceDiscountUsageEntry ( commerceDiscountUsageEntryId ) ;
public class WrappingJavaFileManager { /** * This implementation maps the given list of file objects by * calling wrap on each . Subclasses may override this behavior . * @ param fileObjects a list of file objects * @ return the mapping */ protected Iterable < JavaFileObject > wrap ( Iterable < JavaFileObject > fileObjects ) { } }
List < JavaFileObject > mapped = new ArrayList < JavaFileObject > ( ) ; for ( JavaFileObject fileObject : fileObjects ) mapped . add ( wrap ( fileObject ) ) ; return Collections . unmodifiableList ( mapped ) ;
public class DomainTopicsInner { /** * List domain topics . * List all the topics in a domain . * @ param resourceGroupName The name of the resource group within the user ' s subscription . * @ param domainName Domain name . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < DomainTopicInner > > listByDomainAsync ( String resourceGroupName , String domainName , final ServiceCallback < List < DomainTopicInner > > serviceCallback ) { } }
// Adapts the ServiceResponse-based async variant to a plain ServiceFuture, wiring in the user's callback.
return ServiceFuture . fromResponse ( listByDomainWithServiceResponseAsync ( resourceGroupName , domainName ) , serviceCallback ) ;
public class GeneratorExtensions { /** * Creates a String list with the String constants from the given String list . For instance : * List data = new ArrayList ( ) ; data . add ( " foo " ) ; data . add ( " bar " ) ; * newConstantsFromStringList ( data , false ) ; Result from list : [ " public static final String FOO = * " foo " ; " , " public static final String BAR = " bar " ; " ] * @ param data * The data from what to create the contant strings . * @ param prefix * If the constant name needs a prefix . * @ param suffix * If the constant name needs a suffix . * @ param withQuotation * If the strings in the list have already quotation marks then true . * @ return A list with constants strings . */ public static List < String > newConstantsFromStringList ( final List < String > data , final String prefix , final String suffix , final boolean withQuotation ) { } }
final List < String > returnList = new ArrayList < > ( ) ; final int size = data . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { final String element = data . get ( i ) ; final StringBuilder sb = new StringBuilder ( ) ; sb . append ( "public static final String " ) ; if ( withQuotation ) { String striped = element . trim ( ) . toUpperCase ( ) ; striped = striped . substring ( 1 , striped . length ( ) - 1 ) ; if ( null != prefix ) { sb . append ( prefix . toUpperCase ( ) ) ; } sb . append ( striped ) ; if ( null != suffix ) { sb . append ( suffix . toUpperCase ( ) ) ; } sb . append ( " = " ) ; sb . append ( element + ";" ) ; } else { if ( null != prefix ) { sb . append ( prefix . toUpperCase ( ) ) ; } sb . append ( element . trim ( ) . toUpperCase ( ) ) ; if ( null != suffix ) { sb . append ( suffix . toUpperCase ( ) ) ; } sb . append ( " = " ) ; sb . append ( "\"" + element + "\";" ) ; } returnList . add ( sb . toString ( ) . trim ( ) ) ; } return returnList ;
public class WebSocketDecoder { /** * Returns a map of key / value pairs from a given querystring . * @ param query * @ return k / v map */ public static Map < String , Object > parseQuerystring ( String query ) { } }
String [ ] params = query . split ( "&" ) ; Map < String , Object > map = new HashMap < String , Object > ( ) ; for ( String param : params ) { String [ ] nameValue = param . split ( "=" ) ; map . put ( nameValue [ 0 ] , nameValue [ 1 ] ) ; } return map ;
public class HexHelper { /** * Decodes a hexidecimal string ( optionally with < code > separator < / code > characters separating each two - digit pair ) < br / > * Multiple runs of the separator will be ignored ( eg . AA : : BB will result in < code > [ 0xAA , 0xBB ] < / code > , not * < code > [ 0xAA , 0x00,0xBB ] < / code > ) < br / > * @ param separator * the separator character * @ param value * @ return */ public static final byte [ ] fromHex ( final char separator , final String value ) { } }
if ( value . length ( ) == 0 ) return new byte [ 0 ] ; final String sepString = new String ( new char [ ] { separator } ) ; final StringTokenizer t = new StringTokenizer ( value , sepString , false ) ; final byte [ ] buffer = new byte [ t . countTokens ( ) ] ; int i = 0 ; while ( t . hasMoreTokens ( ) ) { final String hex = t . nextToken ( ) ; if ( hex . length ( ) == 2 ) buffer [ i ++ ] = ( byte ) Integer . parseInt ( hex , 16 ) ; else throw new IllegalArgumentException ( "Hex section of length " + hex . length ( ) + " encountered inside hex string: " + value + " with separator " + separator ) ; } return buffer ;
public class FileUtil { /** * Return an array of FileStatus objects for each file beneath * the given path to a specified depth ( inclusive ) . Directories at * the given depth will be returned , while directories above that * depth will not . * @ param fs FileSystem on which to operate * @ param f starting path * @ param depth the depth of recursion . If 0 the function will * return the status of the current file or directory . * @ exception IOException If this operation fails */ public static FileStatus [ ] listStatus ( FileSystem fs , Path f , int depth ) throws IOException { } }
// Validate arguments up front; a null path/filesystem or negative depth is a caller error.
if ( null == f ) { throw new IllegalArgumentException ( "Path cannot be null." ) ; } if ( null == fs ) { throw new IllegalArgumentException ( "File system cannot be null." ) ; } if ( depth < 0 ) { throw new IllegalArgumentException ( "Depth cannot be negative." ) ; }
// Depth 0: no recursion, just the status of f itself.
if ( depth == 0 ) { FileStatus [ ] fileStatus = { fs . getFileStatus ( f ) } ; return fileStatus ; }
// Recurse via the helper (defined elsewhere in this class), accumulating into a fresh list.
List < FileStatus > fileStatuses = listStatusHelper ( fs , f , depth , new ArrayList < FileStatus > ( ) ) ; return fileStatuses . toArray ( new FileStatus [ fileStatuses . size ( ) ] ) ;
public class PredicateUtils { /** * Constructs a canonical index name from the given index components . * A canonical name is a comma - separated list of index components with a * single space character going after every comma . * It ' s a caller ' s responsibility to canonicalize individual components * as specified by { @ link # canonicalizeAttribute } . * @ param components the index components to construct the canonical index * name from . * @ return the constructed canonical index name . */ public static String constructCanonicalCompositeIndexName ( String [ ] components ) { } }
assert components . length > 1 ; StringBuilder builder = new StringBuilder ( components . length * EXPECTED_AVERAGE_COMPONENT_NAME_LENGTH ) ; for ( String component : components ) { if ( builder . length ( ) > 0 ) { builder . append ( ", " ) ; } builder . append ( component ) ; } return builder . toString ( ) ;
public class Instance { /** * The elastic inference accelerator associated with the instance . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setElasticInferenceAcceleratorAssociations ( java . util . Collection ) } or * { @ link # withElasticInferenceAcceleratorAssociations ( java . util . Collection ) } if you want to override the existing * values . * @ param elasticInferenceAcceleratorAssociations * The elastic inference accelerator associated with the instance . * @ return Returns a reference to this object so that method calls can be chained together . */ public Instance withElasticInferenceAcceleratorAssociations ( ElasticInferenceAcceleratorAssociation ... elasticInferenceAcceleratorAssociations ) { } }
// Lazily create the backing SdkInternalList, pre-sized to the varargs length, on first use.
if ( this . elasticInferenceAcceleratorAssociations == null ) { setElasticInferenceAcceleratorAssociations ( new com . amazonaws . internal . SdkInternalList < ElasticInferenceAcceleratorAssociation > ( elasticInferenceAcceleratorAssociations . length ) ) ; }
// Append (never replace) each supplied association — see the javadoc NOTE about overriding.
for ( ElasticInferenceAcceleratorAssociation ele : elasticInferenceAcceleratorAssociations ) { this . elasticInferenceAcceleratorAssociations . add ( ele ) ; }
// Fluent builder style: return this for chaining.
return this ;
public class Publisher { /** * Sends the HUB url a notification of a change in topic asynchronously * @ param hub URL of the hub to notify . * @ param topic The Topic that has changed * @ param callback A callback invoked when the notification completes . * @ throws NotificationException Any failure */ public void sendUpdateNotificationAsyncronously ( final String hub , final String topic , final AsyncNotificationCallback callback ) { } }
// Wrap the blocking notification in a Runnable; any failure (Throwable, so Errors included) is routed to the callback instead of propagating.
final Runnable r = new Runnable ( ) { @ Override public void run ( ) { try { sendUpdateNotification ( hub , topic ) ; callback . onSuccess ( ) ; } catch ( final Throwable t ) { callback . onFailure ( t ) ; } } } ;
// Prefer the configured executor; fall back to a one-off thread when none was supplied.
if ( executor != null ) { executor . execute ( r ) ; } else { new Thread ( r ) . start ( ) ; }