signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RDBMUserIdentityStore { /** * Gets the PortalUser data store object for the specified user name . * @ param userName The user ' s name * @ return A PortalUser object or null if the user doesn ' t exist . */ private PortalUser getPortalUser ( final String userName ) { } }
return jdbcOperations . execute ( ( ConnectionCallback < PortalUser > ) con -> { PortalUser portalUser = null ; PreparedStatement pstmt = null ; try { String query = "SELECT USER_ID FROM UP_USER WHERE USER_NAME=?" ; pstmt = con . prepareStatement ( query ) ; pstmt . setString ( 1 , userName ) ; ResultSet rs = null ; try { if ( log . isDebugEnabled ( ) ) log . debug ( "RDBMUserIdentityStore::getPortalUID(userName=" + userName + "): " + query ) ; rs = pstmt . executeQuery ( ) ; if ( rs . next ( ) ) { portalUser = new PortalUser ( ) ; portalUser . setUserId ( rs . getInt ( "USER_ID" ) ) ; portalUser . setUserName ( userName ) ; } } finally { try { if ( rs != null ) { rs . close ( ) ; } } catch ( Exception e ) { } } } finally { try { if ( pstmt != null ) { pstmt . close ( ) ; } } catch ( Exception e ) { } } return portalUser ; } ) ;
public class StreamUtil {

    /**
     * Converts the specified property file text to a Properties object.
     *
     * <p>The text is encoded to ISO-8859-1 (the traditional property-file charset);
     * any character outside that charset raises a CharacterCodingException instead
     * of being silently replaced.
     *
     * @param propFileText the property file text in standard property file format
     * @return the resulting Properties object
     * @throws CharacterCodingException if the text contains characters not mappable to ISO-8859-1
     */
    public static Properties toProperties(String propFileText) throws CharacterCodingException {
        CharsetEncoder encoder = StandardCharsets.ISO_8859_1
                .newEncoder()
                .onUnmappableCharacter(CodingErrorAction.REPORT);
        ByteBuffer encoded = encoder.encode(CharBuffer.wrap(propFileText));
        // Copy only the bytes actually produced. ByteBuffer.array() exposes the whole
        // backing array, which is not guaranteed to match the encoded length exactly;
        // trailing garbage bytes would be parsed by Properties.load as bogus keys.
        byte[] bytes = new byte[encoded.remaining()];
        encoded.get(bytes);
        Properties props = new Properties();
        try {
            props.load(new ByteArrayInputStream(bytes));
        } catch (IOException ex) {
            throw new RuntimeException(ex); // shouldn't happen reading from memory
        }
        return props;
    }
}
public class PersonDirectoryPrincipalResolver { /** * Extracts the id of the user from the provided credential . This method should be overridden by subclasses to * achieve more sophisticated strategies for producing a principal ID from a credential . * @ param credential the credential provided by the user . * @ param currentPrincipal the current principal * @ return the username , or null if it could not be resolved . */ protected String extractPrincipalId ( final Credential credential , final Optional < Principal > currentPrincipal ) { } }
LOGGER . debug ( "Extracting credential id based on existing credential [{}]" , credential ) ; val id = credential . getId ( ) ; if ( currentPrincipal != null && currentPrincipal . isPresent ( ) ) { val principal = currentPrincipal . get ( ) ; LOGGER . debug ( "Principal is currently resolved as [{}]" , principal ) ; if ( useCurrentPrincipalId ) { LOGGER . debug ( "Using the existing resolved principal id [{}]" , principal . getId ( ) ) ; return principal . getId ( ) ; } else { LOGGER . debug ( "CAS will NOT be using the identifier from the resolved principal [{}] as it's not " + "configured to use the currently-resolved principal id and will fall back onto using the identifier " + "for the credential, that is [{}], for principal resolution" , principal , id ) ; } } else { LOGGER . debug ( "No principal is currently resolved and available. Falling back onto using the identifier " + " for the credential, that is [{}], for principal resolution" , id ) ; } LOGGER . debug ( "Extracted principal id [{}]" , id ) ; return id ;
public class OperationsInner { /** * Lists all the available Cognitive Services account operations . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; OperationEntityInner & gt ; object */ public Observable < Page < OperationEntityInner > > listAsync ( ) { } }
return listWithServiceResponseAsync ( ) . map ( new Func1 < ServiceResponse < Page < OperationEntityInner > > , Page < OperationEntityInner > > ( ) { @ Override public Page < OperationEntityInner > call ( ServiceResponse < Page < OperationEntityInner > > response ) { return response . body ( ) ; } } ) ;
public class CPOptionUtil {

    /**
     * Returns the cp option where uuid = &#63; and groupId = &#63; or throws a
     * {@link NoSuchCPOptionException} if it could not be found.
     *
     * @param uuid the uuid
     * @param groupId the group ID
     * @return the matching cp option
     * @throws NoSuchCPOptionException if a matching cp option could not be found
     */
    public static CPOption findByUUID_G(String uuid, long groupId)
        throws com.liferay.commerce.product.exception.NoSuchCPOptionException {

        // Thin static facade: delegates the (uuid, groupId) lookup to the persistence layer.
        return getPersistence().findByUUID_G(uuid, groupId);
    }
}
public class DiagnosticsInner {

    /**
     * Get Site Analysis.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param siteName Site Name
     * @param diagnosticCategory Diagnostic Category
     * @param analysisName Analysis Name
     * @param slot Slot - optional
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<DiagnosticAnalysisInner> getSiteAnalysisSlotAsync(String resourceGroupName, String siteName, String diagnosticCategory, String analysisName, String slot, final ServiceCallback<DiagnosticAnalysisInner> serviceCallback) {
        // Adapt the Observable-based variant into a ServiceFuture, attaching the
        // caller's callback to the eventual response.
        return ServiceFuture.fromResponse(getSiteAnalysisSlotWithServiceResponseAsync(resourceGroupName, siteName, diagnosticCategory, analysisName, slot), serviceCallback);
    }
}
public class DatabasesInner {

    /**
     * Creates an import operation that imports a bacpac into an existing database.
     * The existing database must be empty.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database to import into
     * @param parameters The required parameters for importing a Bacpac into a database.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ImportExportResponseInner object if successful.
     */
    public ImportExportResponseInner beginCreateImportOperation(String resourceGroupName, String serverName, String databaseName, ImportExtensionRequest parameters) {
        // Synchronous wrapper: blocks on the async variant, expects exactly one
        // response, and unwraps its body.
        return beginCreateImportOperationWithServiceResponseAsync(resourceGroupName, serverName, databaseName, parameters).toBlocking().single().body();
    }
}
public class EraPreference { /** * used in serialization */ void writeToStream ( DataOutput out ) throws IOException { } }
if ( this == DEFAULT ) { out . writeByte ( 0 ) ; } else { out . writeByte ( NON_DEFAULT_MARKER ) ; out . writeUTF ( this . era . name ( ) ) ; out . writeLong ( this . start . get ( EpochDays . MODIFIED_JULIAN_DATE ) ) ; out . writeLong ( this . end . get ( EpochDays . MODIFIED_JULIAN_DATE ) ) ; }
public class PoolingConnectionFactoryBean {

    /**
     * Set the {@link XAConnectionFactory} directly, instead of calling
     * {@link #setClassName(String)}.
     *
     * @param connectionFactory the connection factory to use
     */
    public void setConnectionFactory(XAConnectionFactory connectionFactory) {
        this.connectionFactory = connectionFactory;
        // Switch the class-name-based creation path to DirectXAConnectionFactory —
        // presumably a wrapper that returns the instance stored above rather than
        // instantiating one reflectively (TODO confirm against that class).
        setClassName(DirectXAConnectionFactory.class.getName());
        // Driver properties are reset: they only apply when a factory is built from
        // a class name. NOTE(review): any previously set driver properties are
        // discarded by this call — confirm callers don't rely on them surviving.
        setDriverProperties(new Properties());
    }
}
public class NodeGroupClient { /** * Updates the node template of the node group . * < p > Sample code : * < pre > < code > * try ( NodeGroupClient nodeGroupClient = NodeGroupClient . create ( ) ) { * ProjectZoneNodeGroupName nodeGroup = ProjectZoneNodeGroupName . of ( " [ PROJECT ] " , " [ ZONE ] " , " [ NODE _ GROUP ] " ) ; * NodeGroupsSetNodeTemplateRequest nodeGroupsSetNodeTemplateRequestResource = NodeGroupsSetNodeTemplateRequest . newBuilder ( ) . build ( ) ; * Operation response = nodeGroupClient . setNodeTemplateNodeGroup ( nodeGroup . toString ( ) , nodeGroupsSetNodeTemplateRequestResource ) ; * < / code > < / pre > * @ param nodeGroup Name of the NodeGroup resource to update . * @ param nodeGroupsSetNodeTemplateRequestResource * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation setNodeTemplateNodeGroup ( String nodeGroup , NodeGroupsSetNodeTemplateRequest nodeGroupsSetNodeTemplateRequestResource ) { } }
SetNodeTemplateNodeGroupHttpRequest request = SetNodeTemplateNodeGroupHttpRequest . newBuilder ( ) . setNodeGroup ( nodeGroup ) . setNodeGroupsSetNodeTemplateRequestResource ( nodeGroupsSetNodeTemplateRequestResource ) . build ( ) ; return setNodeTemplateNodeGroup ( request ) ;
public class InvokableAnnotatedMethod {

    /**
     * Invokes the method on the class of the passed instance, not the declaring
     * class. Useful with proxies.
     *
     * @param instance the instance to invoke the method on
     * @param parameters the arguments to pass to the method
     * @return the invocation result, cast to the caller's expected type
     */
    public <X> X invokeOnInstance(Object instance, Object... parameters) throws IllegalArgumentException, SecurityException, IllegalAccessException, InvocationTargetException, NoSuchMethodException {
        // Snapshot the cache reference: this.methods is an immutable map that is
        // replaced wholesale on update, so readers never need a lock.
        final Map<Class<?>, Method> methods = this.methods;
        Method method = methods.get(instance.getClass());
        if (method == null) {
            // Cache miss: resolve the declared method against the runtime class.
            // the same method may be written to the map twice, but that is ok
            // lookupMethod is very slow
            Method delegate = annotatedMethod.getJavaMember();
            method = SecurityActions.lookupMethod(instance.getClass(), delegate.getName(), delegate.getParameterTypes());
            SecurityActions.ensureAccessible(method);
            // Copy-on-write update under lock: build a new map and publish an
            // immutable view so concurrent readers never see partial state.
            synchronized (this) {
                final Map<Class<?>, Method> newMethods = new HashMap<Class<?>, Method>(methods);
                newMethods.put(instance.getClass(), method);
                this.methods = WeldCollections.immutableMapView(newMethods);
            }
        }
        return cast(method.invoke(instance, parameters));
    }
}
public class GUID {

    /**
     * Gets GUID as byte array.
     *
     * <p>The first three groups (time_low, time_mid, time_hi_and_version) are
     * written little-endian, the remaining eight bytes big-endian — the standard
     * mixed-endian binary GUID layout.
     *
     * @param GUID the GUID in canonical string form
     * @return the 16-byte binary representation of the GUID
     */
    public static byte[] getGuidAsByteArray(final String GUID) {
        final UUID uuid = UUID.fromString(GUID);
        final long msb = uuid.getMostSignificantBits();
        final ByteBuffer guid = ByteBuffer.allocate(16);
        // time_low (int), time_mid (short), time_hi_and_version (short): little-endian.
        guid.order(ByteOrder.LITTLE_ENDIAN);
        guid.putInt((int) (msb >>> 32));
        guid.putShort((short) (msb >>> 16));
        guid.putShort((short) msb);
        // clock_seq and node: plain big-endian byte order.
        guid.order(ByteOrder.BIG_ENDIAN);
        guid.putLong(uuid.getLeastSignificantBits());
        return guid.array();
    }
}
public class UniversalSingleStorageJdbcQueue { /** * { @ inheritDoc } */ @ Override protected boolean removeFromEphemeralStorage ( Connection conn , IQueueMessage < Long , byte [ ] > _msg ) { } }
if ( ! ( _msg instanceof UniversalIdIntQueueMessage ) ) { throw new IllegalArgumentException ( "This method requires an argument of type [" + UniversalIdIntQueueMessage . class . getName ( ) + "]!" ) ; } UniversalIdIntQueueMessage msg = ( UniversalIdIntQueueMessage ) _msg ; int numRows = getJdbcHelper ( ) . execute ( conn , SQL_REMOVE_FROM_EPHEMERAL , getQueueName ( ) , msg . getId ( ) ) ; return numRows > 0 ;
public class AmazonElasticFileSystemClient { /** * Returns the description of a specific Amazon EFS file system if either the file system < code > CreationToken < / code > * or the < code > FileSystemId < / code > is provided . Otherwise , it returns descriptions of all file systems owned by the * caller ' s AWS account in the AWS Region of the endpoint that you ' re calling . * When retrieving all file system descriptions , you can optionally specify the < code > MaxItems < / code > parameter to * limit the number of descriptions in a response . Currently , this number is automatically set to 10 . If more file * system descriptions remain , Amazon EFS returns a < code > NextMarker < / code > , an opaque token , in the response . In * this case , you should send a subsequent request with the < code > Marker < / code > request parameter set to the value * of < code > NextMarker < / code > . * To retrieve a list of your file system descriptions , this operation is used in an iterative process , where * < code > DescribeFileSystems < / code > is called first without the < code > Marker < / code > and then the operation continues * to call it with the < code > Marker < / code > parameter set to the value of the < code > NextMarker < / code > from the * previous response until the response has no < code > NextMarker < / code > . * The order of file systems returned in the response of one < code > DescribeFileSystems < / code > call and the order of * file systems returned across the responses of a multi - call iteration is unspecified . * This operation requires permissions for the < code > elasticfilesystem : DescribeFileSystems < / code > action . * @ param describeFileSystemsRequest * @ return Result of the DescribeFileSystems operation returned by the service . * @ throws BadRequestException * Returned if the request is malformed or contains an error such as an invalid parameter value or a missing * required parameter . 
* @ throws InternalServerErrorException * Returned if an error occurred on the server side . * @ throws FileSystemNotFoundException * Returned if the specified < code > FileSystemId < / code > value doesn ' t exist in the requester ' s AWS account . * @ sample AmazonElasticFileSystem . DescribeFileSystems * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticfilesystem - 2015-02-01 / DescribeFileSystems " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeFileSystemsResult describeFileSystems ( DescribeFileSystemsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeFileSystems ( request ) ;
public class PropertyConnectionCalculator {

    /**
     * Resolves the temporary properties of the given property map. Each temporary
     * property found among a map entry's values is replaced by that temporary
     * property's own values (looked up as a key of the same map). The substitution
     * repeats as a fixed-point iteration until a full pass finds no temporary
     * properties in any value set.
     */
    private void resolveTemporaryProperties(Map<String, Set<String>> map) {
        boolean temporaryPresent = false;
        do {
            temporaryPresent = false;
            for (Map.Entry<String, Set<String>> entry : map.entrySet()) {
                // Replacement values gathered during this pass; added after iteration
                // to avoid modifying the set while its iterator is open.
                Set<String> newProperties = new HashSet<String>();
                Iterator<String> properties = entry.getValue().iterator();
                while (properties.hasNext()) {
                    String property = properties.next();
                    if (isTemporaryProperty(property)) {
                        // NOTE(review): placeholders look swapped relative to the message
                        // text — entry.getKey() is logged as the "field" and property as
                        // the "property"; confirm the intended wording.
                        LOGGER.debug("Resolve temporary field {} for property {}", entry.getKey(), property);
                        temporaryPresent = true;
                        // NOTE(review): assumes every temporary property is itself a key
                        // of the map; a missing key would NPE here — TODO confirm invariant.
                        newProperties.addAll(map.get(property));
                        // Remove the placeholder via the iterator (safe in-loop removal).
                        properties.remove();
                    }
                }
                // Splice in the resolved values; any temporaries among them are
                // handled by the next do-while pass.
                entry.getValue().addAll(newProperties);
            }
        } while (temporaryPresent);
    }
}
public class Ci_HelpTable {

    /**
     * Adds help for one specific command to the table; falls back to a console
     * hint when the command is unknown.
     *
     * @param at table to add help information to
     * @param toHelp the command to help with
     */
    protected void specificHelp(AsciiTable at, String toHelp) {
        if (this.skbShell.getCommandMap().containsKey(toHelp)) {
            // we have a command to show help for, collect all information and present help
            // NOTE(review): resolves the map entry for toHelp and then that entry's own
            // command of the same name — confirm the double lookup is intended.
            SkbShellCommand ssc = this.skbShell.getCommandMap().get(toHelp).getCommands().get(toHelp);
            // Sorted map of rendered argument keys: optional arguments shown as
            // [key], required arguments as <key>.
            TreeMap<String, SkbShellArgument> args = new TreeMap<>();
            if (ssc.getArguments() != null) {
                for (SkbShellArgument ssa : ssc.getArguments()) {
                    if (ssa.isOptional()) {
                        args.put("[" + ssa.getKey() + "]", ssa);
                    } else {
                        args.put("<" + ssa.getKey() + ">", ssa);
                    }
                }
            }
            // Header row: command name plus comma-separated argument keys, then the
            // command description on its own row.
            at.addRow(ssc.getCommand(), new StrBuilder().appendWithSeparators(args.keySet(), ", "));
            at.addRow("", ssc.getDescription());
            // One detail row per argument; the template varies with which optional
            // pieces (value set, additional help) are present.
            for (SkbShellArgument ssa : args.values()) {
                if (ssa.valueSet() != null && ssa.addedHelp() != null) {
                    at.addRow("", FormattingTupleWrapper.create(" -- <{}> of type {} - {} - {} - value set {}", ssa.getKey(), ssa.getType().name(), ssa.getDescription(), ssa.addedHelp(), ArrayUtils.toString(ssa.valueSet())));
                } else if (ssa.valueSet() != null && ssa.addedHelp() == null) {
                    at.addRow("", FormattingTupleWrapper.create(" -- <{}> of type {} - {} - value set {}", ssa.getKey(), ssa.getType().name(), ssa.getDescription(), ArrayUtils.toString(ssa.valueSet())));
                } else if (ssa.valueSet() == null && ssa.addedHelp() != null) {
                    at.addRow("", FormattingTupleWrapper.create(" -- <{}> of type {} - {} - {}", ssa.getKey(), ssa.getType().name(), ssa.getDescription(), ssa.addedHelp()));
                } else {
                    at.addRow("", FormattingTupleWrapper.create(" -- <{}> of type {} - {}", ssa.getKey(), ssa.getType().name(), ssa.getDescription()));
                }
            }
            // Trailing free-form help for the command, if any.
            if (ssc.addedHelp() != null) {
                at.addRow("", ssc.addedHelp());
            }
        } else {
            // Unknown command: emit a console hint instead of filling the table.
            MessageConsole.conInfo("");
            MessageConsole.conInfo("{}: no command {} found for help, try 'help' to see all available commands", new Object[]{this.skbShell.getPromptName(), toHelp});
        }
    }
}
public class EcorePackageRenameStrategy { /** * Change the package name . * @ param newName the new name . * @ param resourceSet the set of resource to use . */ protected void setPackageName ( String newName , ResourceSet resourceSet ) { } }
final EObject object = resourceSet . getEObject ( this . uriProvider . apply ( resourceSet ) , true ) ; if ( object instanceof SarlScript ) { ( ( SarlScript ) object ) . setPackage ( newName ) ; } else { throw new RefactoringException ( "SARL script not loaded." ) ; // $ NON - NLS - 1 $ }
public class IntStream { /** * Performs a reduction on the elements of this stream , using the provided * identity value and an associative accumulation function , and returns the * reduced value . * < p > The { @ code identity } value must be an identity for the accumulator * function . This means that for all { @ code x } , * { @ code accumulator . apply ( identity , x ) } is equal to { @ code x } . * The { @ code accumulator } function must be an associative function . * < p > This is a terminal operation . * < p > Example : * < pre > * identity : 0 * accumulator : ( a , b ) - & gt ; a + b * stream : [ 1 , 2 , 3 , 4 , 5] * result : 15 * < / pre > * @ param identity the identity value for the accumulating function * @ param op an associative non - interfering stateless function for * combining two values * @ return the result of the reduction * @ see # sum ( ) * @ see # min ( ) * @ see # max ( ) */ public int reduce ( int identity , @ NotNull IntBinaryOperator op ) { } }
int result = identity ; while ( iterator . hasNext ( ) ) { int value = iterator . nextInt ( ) ; result = op . applyAsInt ( result , value ) ; } return result ;
public class ClassDescriptorDef { /** * Returns the collection definition of the given name if it exists . * @ param name The name of the collection * @ return The collection definition or < code > null < / code > if there is no such collection */ public CollectionDescriptorDef getCollection ( String name ) { } }
CollectionDescriptorDef collDef = null ; for ( Iterator it = _collections . iterator ( ) ; it . hasNext ( ) ; ) { collDef = ( CollectionDescriptorDef ) it . next ( ) ; if ( collDef . getName ( ) . equals ( name ) ) { return collDef ; } } return null ;
public class AbstractLinkedList { /** * Replaces an entry in the list . * @ param oldEntry * the entry to be replaced . * @ param newEntry * the replacement entry . */ protected void replaceEntry ( T oldEntry , T newEntry ) { } }
T prev = oldEntry . getPrev ( ) ; T next = newEntry . getNext ( ) ; if ( prev != null ) { prev . setNext ( newEntry ) ; } else { head = newEntry ; } if ( next != null ) { next . setPrev ( newEntry ) ; } else { last = newEntry ; }
public class EntityUtils { /** * Adds the hit effects into the world for the specified { @ link IBlockState } . * @ param world the world * @ param target the target * @ param particleManager the effect renderer * @ param states the states */ @ SideOnly ( Side . CLIENT ) public static void addHitEffects ( World world , RayTraceResult target , ParticleManager particleManager , IBlockState ... states ) { } }
BlockPos pos = target . getBlockPos ( ) ; if ( ArrayUtils . isEmpty ( states ) ) states = new IBlockState [ ] { world . getBlockState ( pos ) } ; IBlockState baseState = world . getBlockState ( pos ) ; if ( baseState . getRenderType ( ) != EnumBlockRenderType . INVISIBLE ) return ; double fxX = pos . getX ( ) + world . rand . nextDouble ( ) ; double fxY = pos . getY ( ) + world . rand . nextDouble ( ) ; double fxZ = pos . getZ ( ) + world . rand . nextDouble ( ) ; AxisAlignedBB aabb = baseState . getBoundingBox ( world , pos ) ; switch ( target . sideHit ) { case DOWN : fxY = pos . getY ( ) + aabb . minY - 0.1F ; break ; case UP : fxY = pos . getY ( ) + aabb . maxY + 0.1F ; break ; case NORTH : fxZ = pos . getZ ( ) + aabb . minZ - 0.1F ; break ; case SOUTH : fxZ = pos . getZ ( ) + aabb . maxY + 0.1F ; break ; case EAST : fxX = pos . getX ( ) + aabb . maxX + 0.1F ; break ; case WEST : fxX = pos . getX ( ) + aabb . minX + 0.1F ; break ; default : break ; } int id = Block . getStateId ( states [ world . rand . nextInt ( states . length ) ] ) ; ParticleDigging . Factory factory = new ParticleDigging . Factory ( ) ; ParticleDigging fx = ( ParticleDigging ) factory . createParticle ( 0 , world , fxX , fxY , fxZ , 0 , 0 , 0 , id ) ; fx . multiplyVelocity ( 0.2F ) . multipleParticleScaleBy ( 0.6F ) ; particleManager . addEffect ( fx ) ;
public class MainController {

    /**
     * Initializes the main view after the FXML is loaded: wires the title
     * hamburger animation and tooltip to the drawer, builds the toolbar popup,
     * and starts the content and side-menu flows.
     *
     * @throws Exception if an FXML resource cannot be loaded or a flow fails to start
     */
    @PostConstruct
    public void init() throws Exception {
        // init the title hamburger icon
        final JFXTooltip burgerTooltip = new JFXTooltip("Open drawer");
        // Play the burger animation forward on open, backward on close, keeping
        // the tooltip text in sync with the drawer state.
        drawer.setOnDrawerOpening(e -> {
            final Transition animation = titleBurger.getAnimation();
            burgerTooltip.setText("Close drawer");
            animation.setRate(1);
            animation.play();
        });
        drawer.setOnDrawerClosing(e -> {
            final Transition animation = titleBurger.getAnimation();
            burgerTooltip.setText("Open drawer");
            animation.setRate(-1);
            animation.play();
        });
        // Clicking the burger toggles the drawer.
        titleBurgerContainer.setOnMouseClicked(e -> {
            if (drawer.isClosed() || drawer.isClosing()) {
                drawer.open();
            } else {
                drawer.close();
            }
        });
        // Options popup shown from the toolbar's options burger, offset to align
        // with the icon.
        FXMLLoader loader = new FXMLLoader(getClass().getResource("/fxml/ui/popup/MainPopup.fxml"));
        loader.setController(new InputController());
        toolbarPopup = new JFXPopup(loader.load());
        optionsBurger.setOnMouseClicked(e -> toolbarPopup.show(optionsBurger, PopupVPosition.TOP, PopupHPosition.RIGHT, -12, 15));
        JFXTooltip.setVisibleDuration(Duration.millis(3000));
        JFXTooltip.install(titleBurgerContainer, burgerTooltip, Pos.BOTTOM_CENTER);
        // create the inner flow and content
        context = new ViewFlowContext();
        // set the default controller
        Flow innerFlow = new Flow(ButtonController.class);
        final FlowHandler flowHandler = innerFlow.createHandler(context);
        context.register("ContentFlowHandler", flowHandler);
        context.register("ContentFlow", innerFlow);
        final Duration containerAnimationDuration = Duration.millis(320);
        // The drawer's main content is the started inner flow with swipe animation.
        drawer.setContent(flowHandler.start(new ExtendedAnimatedFlowContainer(containerAnimationDuration, SWIPE_LEFT)));
        context.register("ContentPane", drawer.getContent().get(0));
        // side controller will add links to the content flow
        Flow sideMenuFlow = new Flow(SideMenuController.class);
        final FlowHandler sideMenuFlowHandler = sideMenuFlow.createHandler(context);
        drawer.setSidePane(sideMenuFlowHandler.start(new ExtendedAnimatedFlowContainer(containerAnimationDuration, SWIPE_LEFT)));
    }
}
public class PriceTypeEnumeration { /** * Returns a PriceTypeEnumeration based on the String value or null . * @ param value Value to search for . * @ return PriceTypeEnumeration or null . */ public static PriceTypeEnumeration findByValue ( final String value ) { } }
if ( value . equalsIgnoreCase ( "negotiable" ) ) { return PriceTypeEnumeration . NEGOTIABLE ; } else { return PriceTypeEnumeration . STARTING ; }
public class Example {

    /**
     * Creates a converter which applies {@code inputConverter} to the input of
     * each example, and {@code outputConverter} to the output.
     *
     * @param inputConverter converter applied to each example's input (A to C)
     * @param outputConverter converter applied to each example's output (B to D)
     * @return a converter over whole examples, composed of the two given converters
     */
    public static <A, B, C, D> Converter<Example<A, B>, Example<C, D>> converter(Converter<A, C> inputConverter, Converter<B, D> outputConverter) {
        return new ExampleConverter<A, B, C, D>(inputConverter, outputConverter);
    }
}
public class ProjectNodeSupport {

    /**
     * Return a list of resource model configuration.
     *
     * @param keyprefix prefix for properties
     * @param serviceName name of the plugin service
     * @param extra forwarded to the overload; presumably toggles inclusion of
     *        extra configuration data — confirm against that overload
     * @return list of plugin configurations, one per configured resource model source
     */
    public List<ExtPluginConfiguration> listPluginConfigurations(final String keyprefix, final String serviceName, boolean extra) {
        // Delegate to the overload, sourcing the properties from this project's configuration.
        return listPluginConfigurations(projectConfig.getProjectProperties(), keyprefix, serviceName, extra);
    }
}
public class ValidateUtils { /** * Validates the operation inputs . * @ param inputs @ throws Exception for invalid inputs */ public static void validateInputs ( EditXmlInputs inputs ) throws Exception { } }
validateXmlAndFilePathInputs ( inputs . getXml ( ) , inputs . getFilePath ( ) ) ; if ( Constants . Inputs . MOVE_ACTION . equals ( inputs . getAction ( ) ) ) { validateIsNotEmpty ( inputs . getXpath2 ( ) , "xpath2 input is required for action 'move' " ) ; } if ( ! Constants . Inputs . SUBNODE_ACTION . equals ( inputs . getAction ( ) ) && ! Constants . Inputs . MOVE_ACTION . equals ( inputs . getAction ( ) ) ) { validateIsNotEmpty ( inputs . getType ( ) , "type input is required for action '" + inputs . getAction ( ) + "'" ) ; if ( ! Constants . Inputs . TYPE_ELEM . equals ( inputs . getType ( ) ) && ! Constants . Inputs . TYPE_ATTR . equals ( inputs . getType ( ) ) && ! Constants . Inputs . TYPE_TEXT . equals ( inputs . getType ( ) ) ) { throw new Exception ( "Invalid type. Only supported : " + Constants . Inputs . TYPE_ELEM + ", " + Constants . Inputs . TYPE_ATTR + ", " + Constants . Inputs . TYPE_TEXT ) ; } if ( Constants . Inputs . TYPE_ATTR . equals ( inputs . getType ( ) ) ) { validateIsNotEmpty ( inputs . getName ( ) , "name input is required for type 'attr' " ) ; } }
public class Dia { /** * If not set , defaults to the last path segment of path , with any " . dia " extension stripped . */ @ Override public String getLabel ( ) { } }
String l = label ; if ( l != null ) return l ; String p = path ; if ( p != null ) { String filename = p . substring ( p . lastIndexOf ( '/' ) + 1 ) ; if ( filename . endsWith ( DOT_EXTENSION ) ) filename = filename . substring ( 0 , filename . length ( ) - DOT_EXTENSION . length ( ) ) ; if ( filename . isEmpty ( ) ) throw new IllegalArgumentException ( "Invalid filename for diagram: " + p ) ; return filename ; } throw new IllegalStateException ( "Cannot get label, neither label nor path set" ) ;
public class ResourceCopy {

    /**
     * Copies the file <tt>file</tt> to the directory <tt>dir</tt>, keeping the structure relative to <tt>rel</tt>.
     *
     * @param file the file to copy
     * @param rel the base 'relative'
     * @param dir the directory
     * @param mojo the mojo
     * @param filtering the filtering component; when {@code null} the file is copied verbatim
     * @param additionalProperties additional properties made available during filtering
     * @throws IOException if the file cannot be copied.
     */
    public static void copyFileToDir(File file, File rel, File dir, AbstractWisdomMojo mojo, MavenResourcesFiltering filtering, Properties additionalProperties) throws IOException {
        if (filtering == null) {
            // Plain copy: recreate the rel-relative directory structure under dir.
            File out = computeRelativeFile(file, rel, dir);
            if (out.getParentFile() != null) {
                // NOTE: mkdirs() runs inside the log-argument expression, so the
                // directory is created regardless of log level (arguments are
                // evaluated eagerly).
                mojo.getLog().debug("Creating " + out.getParentFile() + " : " + out.getParentFile().mkdirs());
                FileUtils.copyFileToDirectory(file, out.getParentFile());
            } else {
                throw new IOException("Cannot copy file - parent directory not accessible for " + file.getAbsolutePath());
            }
        } else {
            // Filtered copy: route the file through Maven resource filtering.
            Resource resource = new Resource();
            resource.setDirectory(rel.getAbsolutePath());
            resource.setFiltering(true);
            resource.setTargetPath(dir.getAbsolutePath());
            // Match the file anywhere below rel by its name.
            resource.setIncludes(ImmutableList.of("**/" + file.getName()));
            // Extensions that must never be filtered (defaults plus project-specific ones).
            List<String> excludedExtensions = new ArrayList<>();
            excludedExtensions.addAll(filtering.getDefaultNonFilteredFileExtensions());
            excludedExtensions.addAll(NON_FILTERED_EXTENSIONS);
            MavenResourcesExecution exec = new MavenResourcesExecution(ImmutableList.of(resource), dir, mojo.project, "UTF-8", Collections.<String>emptyList(), excludedExtensions, mojo.session);
            if (additionalProperties != null) {
                exec.setAdditionalProperties(additionalProperties);
            }
            exec.setEscapeString("\\");
            try {
                filtering.filterResources(exec);
            } catch (MavenFilteringException e) {
                throw new IOException("Error while copying resources", e);
            }
        }
    }
}
public class ParameterServerSubscriber { /** * When this is a slave node * it returns the connection url for this node * and the associated master connection urls in the form of : * host : port : streamId * @ return the slave connection info */ public SlaveConnectionInfo slaveConnectionInfo ( ) { } }
if ( isMaster ( ) ) throw new IllegalStateException ( "Unable to determine slave connection info. This is a master node" ) ; return SlaveConnectionInfo . builder ( ) . connectionUrl ( subscriber . connectionUrl ( ) ) . masterUrl ( publishMasterUrl ) . build ( ) ;
public class CmsDefaultPageEditor { /** * Returns the OpenCms VFS uri of the template of the current page . < p > * @ return the OpenCms VFS uri of the template of the current page */ public String getUriTemplate ( ) { } }
String result = "" ; try { result = getCms ( ) . readPropertyObject ( getParamTempfile ( ) , CmsPropertyDefinition . PROPERTY_TEMPLATE , true ) . getValue ( "" ) ; } catch ( CmsException e ) { LOG . warn ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_READ_TEMPLATE_PROP_FAILED_0 ) , e ) ; } return result ;
public class Configuration { /** * Get an input stream attached to the configuration resource with the * given < code > name < / code > . * @ param name configuration resource name . * @ return an input stream attached to the resource . */ public InputStream getConfResourceAsInputStream ( String name ) { } }
try { URL url = getResource ( name ) ; if ( url == null ) { LOG . info ( name + " not found" ) ; return null ; } else { LOG . info ( "found resource " + name + " at " + url ) ; } return url . openStream ( ) ; } catch ( Exception e ) { return null ; }
public class ConcurrentLinkedDeque {

    /**
     * Unlinks non-null last node.
     *
     * <p>Walks backwards from {@code prev} over logically deleted nodes
     * (item == null) to find an active predecessor, then tries to CAS
     * {@code last.prev} past the deleted run; when safe, gc-unlinks the
     * skipped nodes so they are unreachable from both ends of the deque.
     */
    private void unlinkLast(Node<E> last, Node<E> prev) {
        // assert last != null;
        // assert prev != null;
        // assert last.item == null;
        // o remembers the most recent deleted node skipped (null until one is seen);
        // p is the current candidate predecessor; q caches p.prev.
        for (Node<E> o = null, p = prev, q;;) {
            if (p.item != null || (q = p.prev) == null) {
                // p is active (or the first node): if any deleted nodes were skipped
                // and p is not self-linked, splice them out of the prev chain.
                if (o != null && p.next != p && last.casPrev(prev, p)) {
                    skipDeletedSuccessors(p);
                    // Only gc-unlink when the structure is quiescent enough that the
                    // skipped run cannot be reached again through last or p.
                    if (last.next == null && (p.prev == null || p.item != null) && p.next == last) {
                        updateHead(); // Ensure o is not reachable from head
                        updateTail(); // Ensure o is not reachable from tail
                        // Finally, actually gc-unlink: self-link prev and point next
                        // at the terminator so traversals restart from the ends.
                        o.lazySetPrev(o);
                        o.lazySetNext(nextTerminator());
                    }
                }
                return;
            }
            // p is already gc-unlinked (self-linked); another thread finished the job.
            else if (p == q)
                return;
            else {
                // Skip the deleted node and keep walking backwards.
                o = p;
                p = q;
            }
        }
    }
}
public class XMLUtil { /** * Replies the float value that corresponds to the specified attribute ' s path . * < p > The path is an ordered list of tag ' s names and ended by the name of * the attribute . * @ param document is the XML document to explore . * @ param caseSensitive indicates of the { @ code path } ' s components are case sensitive . * @ param defaultValue is the default value to reply . * @ param path is the list of and ended by the attribute ' s name . * @ return the float value of the specified attribute or < code > null < / code > if * it was node found in the document */ @ Pure public static Float getAttributeFloatWithDefault ( Node document , boolean caseSensitive , Float defaultValue , String ... path ) { } }
assert document != null : AssertMessages . notNullParameter ( 0 ) ; final String v = getAttributeValue ( document , caseSensitive , 0 , path ) ; if ( v != null ) { try { return Float . parseFloat ( v ) ; } catch ( NumberFormatException e ) { } } return defaultValue ;
public class SipStackTool {

    /**
     * Initialize SipStack using provided properties.
     *
     * <p>Any previously created stack is torn down first, so repeated calls
     * always produce a fresh instance.
     *
     * @param transport    transport to use (e.g. "udp" or "tcp")
     * @param myPort       local port as a decimal string; a non-numeric value
     *                     raises NumberFormatException
     * @param myProperties JAIN-SIP / SipUnit stack properties
     * @return the newly created SipStack
     * @throws Exception if stack creation fails; the failure is logged before rethrowing
     */
    public SipStack initializeSipStack(String transport, String myPort, Properties myProperties) throws Exception {
        /*
         * http://code.google.com/p/mobicents/issues/detail?id=3121
         * Reset sipStack when calling initializeSipStack method
         */
        tearDown();
        try {
            sipStack = new SipStack(transport, Integer.valueOf(myPort), myProperties);
            logger.info("SipStack - " + sipStackName + " - created!");
        } catch (Exception ex) {
            // Log then rethrow so callers still see the original failure.
            logger.info("Exception: " + ex.getClass().getName() + ": " + ex.getMessage());
            throw ex;
        }
        initialized = true;
        return sipStack;
    }
}
public class UploadObjectObserver { /** * < ! - - - Notified from * { @ link AmazonS3EncryptionClient # uploadObject ( UploadObjectRequest ) } when * all parts have been successfully uploaded to S3 . - - - > This method is * responsible for finishing off the upload by making a complete multi - part * upload request to S3 with the given list of etags . * @ param partETags * all the etags returned from S3 for the previous part uploads . * @ return the completed multi - part upload result */ public CompleteMultipartUploadResult onCompletion ( List < PartETag > partETags ) { } }
return s3 . completeMultipartUpload ( new CompleteMultipartUploadRequest ( req . getBucketName ( ) , req . getKey ( ) , uploadId , partETags ) ) ;
public class ListDeviceEventsResult { /** * The device events requested for the device ARN . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDeviceEvents ( java . util . Collection ) } or { @ link # withDeviceEvents ( java . util . Collection ) } if you want to * override the existing values . * @ param deviceEvents * The device events requested for the device ARN . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListDeviceEventsResult withDeviceEvents ( DeviceEvent ... deviceEvents ) { } }
if ( this . deviceEvents == null ) { setDeviceEvents ( new java . util . ArrayList < DeviceEvent > ( deviceEvents . length ) ) ; } for ( DeviceEvent ele : deviceEvents ) { this . deviceEvents . add ( ele ) ; } return this ;
public class LocalisationManager {

    /**
     * Method searchForPtoPOutputHandler
     *
     * <p>Method will attempt to get an output handler to a remote localisation.
     * If the outputHandler does not exist, a transmit queue is created etc.
     *
     * <p>Stubbed out: per the original "Venu temp" note, the Liberty runtime
     * has no WLM and this method is never invoked at runtime in this release,
     * so it unconditionally returns null.
     *
     * @param preferredME preferred messaging engine (unused in the stub)
     * @param localMessage whether the message is local (unused in the stub)
     * @return always {@code null} in this stubbed implementation
     * @throws SIRollbackException
     * @throws SIConnectionLostException
     * @throws SIResourceException
     */
    private OutputHandler searchForPtoPOutputHandler(SIBUuid8 preferredME, boolean localMessage, AbstractRemoteSupport remoteSupport, HashSet<SIBUuid8> scopedMEs) throws SIRollbackException, SIConnectionLostException, SIResourceException {
        // Venu temp
        // For Liberty runtime, there would not be WLM. In this release of Liberty,
        // in runtime this function is not called. Hence just returning null.
        return null;
    }
}
public class ContainerModule {

    /**
     * Executes the added test modules and their callbacks in the order of their
     * addition using {@link ScriptContext#run(TestModule)}.
     *
     * <p>The archive directory is temporarily redirected to this container
     * module's directory for the duration of the run and restored afterwards,
     * even when a module or callback throws.
     */
    @Override
    public void execute() {
        // save archive dir of the outer module so it can be reset later
        String previousArchiveDir = archiveDirProvider.get().getPath();
        try {
            // make sure archive dirs are within that of the container module
            moduleArchiveDir = moduleArchiveDirProvider.get().getPath();
            config.put(JFunkConstants.ARCHIVE_DIR, moduleArchiveDir);
            for (ModuleWithCallbacks moduleWithCallbacks : modulesWithCallbacks) {
                TestModule testModule = moduleWithCallbacks.testModule;
                // optional hook executed before the module runs
                Runnable beforeModuleCallback = moduleWithCallbacks.beforeModuleCallback;
                if (beforeModuleCallback != null) {
                    beforeModuleCallback.run();
                }
                scriptContext.run(testModule);
                // optional hook executed after the module runs;
                // skipped when run() throws
                Runnable afterModuleCallback = moduleWithCallbacks.afterModuleCallback;
                if (afterModuleCallback != null) {
                    afterModuleCallback.run();
                }
            }
        } finally {
            // reset for outer module, so property is correctly archived
            config.put(JFunkConstants.ARCHIVE_DIR, previousArchiveDir);
            // it does not make sense to archive datasets in the container module,
            // these would be those of the test module executed last
            config.put(JFunkConstants.ARCHIVE_DATASETS, "false");
        }
    }
}
public class KryoUtils { /** * Tries to copy the given record from using the provided Kryo instance . If this fails , then * the record from is copied by serializing it into a byte buffer and deserializing it from * there . * @ param from Element to copy * @ param kryo Kryo instance to use * @ param serializer TypeSerializer which is used in case of a Kryo failure * @ param < T > Type of the element to be copied * @ return Copied element */ public static < T > T copy ( T from , Kryo kryo , TypeSerializer < T > serializer ) { } }
try { return kryo . copy ( from ) ; } catch ( KryoException ke ) { // Kryo could not copy the object - - > try to serialize / deserialize the object try { byte [ ] byteArray = InstantiationUtil . serializeToByteArray ( serializer , from ) ; return InstantiationUtil . deserializeFromByteArray ( serializer , byteArray ) ; } catch ( IOException ioe ) { throw new RuntimeException ( "Could not copy object by serializing/deserializing" + " it." , ioe ) ; } }
public class ModifyInstanceCreditSpecificationRequest {

    /**
     * Information about the credit option for CPU usage.
     *
     * <p>Lazily initializes the backing list on first access, so this never
     * returns {@code null}.
     *
     * @return Information about the credit option for CPU usage.
     */
    public java.util.List<InstanceCreditSpecificationRequest> getInstanceCreditSpecifications() {
        if (instanceCreditSpecifications == null) {
            instanceCreditSpecifications = new com.amazonaws.internal.SdkInternalList<InstanceCreditSpecificationRequest>();
        }
        return instanceCreditSpecifications;
    }
}
public class GridTable { /** * Search through the buffers for this bookmark . * @ return int index in table ; or - 1 if not found . * @ param bookmark java . lang . Object The bookmark to search for . * @ param iHandleType The bookmark type . */ public int bookmarkToIndex ( Object bookmark , int iHandleType ) { } }
if ( bookmark == null ) return - 1 ; int iTargetPosition = m_gridBuffer . bookmarkToIndex ( bookmark , iHandleType ) ; if ( iTargetPosition == - 1 ) iTargetPosition = m_gridList . bookmarkToIndex ( bookmark , iHandleType ) ; return iTargetPosition ; // Target position
public class TreeUtil { /** * Returns the first descendant of the given node that is not a ParenthesizedExpression . */ public static Expression trimParentheses ( Expression node ) { } }
while ( node instanceof ParenthesizedExpression ) { node = ( ( ParenthesizedExpression ) node ) . getExpression ( ) ; } return node ;
public class StandardClassBodyEmitter {

    /**
     * Emits a generated toString() override for the given constructor.
     *
     * <p>The generated method yields {@code "Name"} for a no-arg constructor and
     * {@code "Name(arg1 = <v1>, arg2 = <v2>)"} otherwise.
     *
     * @see com.pogofish.jadt.emitter.ClassBodyEmitter#emitToString(com.pogofish.jadt.emitter.Sink, com.pogofish.jadt.ast.Constructor)
     */
    @Override
    public void emitToString(Sink sink, String indent, Constructor constructor) {
        logger.finest("Generating toString() for " + constructor.name);
        sink.write(indent + "@Override\n");
        sink.write(indent + "public String toString() {\n");
        sink.write(indent + " return \"" + constructor.name);
        if (!constructor.args.isEmpty()) {
            sink.write("(");
            // Emit "name = <value>" pairs, comma-separated.
            boolean first = true;
            for (Arg arg : constructor.args) {
                if (first) {
                    first = false;
                } else {
                    sink.write(", ");
                }
                sink.write(arg.name + " = \" + " + arg.name + " + \"");
            }
            sink.write(")");
        }
        // Close the generated string literal and the method body.
        sink.write("\";\n");
        sink.write(indent + "}");
    }
}
public class JavaExprAnalyzer {

    /**
     * Analyze an expression.
     *
     * @param expr                 The expression to analyze.
     * @param availableIdentifiers Total set of declarations available.
     * @return The analysis result holding the {@code Set} of declarations used
     *         by the expression.
     * @throws RecognitionException If an error occurs in the parser.
     */
    @SuppressWarnings("unchecked")
    public JavaAnalysisResult analyzeExpression(final String expr, final BoundIdentifiers availableIdentifiers) throws RecognitionException {
        final JavaParser parser = parse(expr);
        // Parse as a conditional-or expression; the parser collects
        // identifiers as a side effect of parsing.
        parser.conditionalOrExpression();
        JavaAnalysisResult result = new JavaAnalysisResult();
        result.setAnalyzedExpr(expr);
        result.setIdentifiers(new HashSet<String>(parser.getIdentifiers()));
        // Classify the collected identifiers against the declarations in scope.
        return analyze(result, availableIdentifiers);
    }
}
public class ByteArrayUtil {

    /**
     * Read a big-endian unsigned short from the byte array at the given offset.
     *
     * @param array  Array to read from
     * @param offset Offset to read at
     * @return the unsigned 16-bit value, in the range [0, 65535]
     */
    public static int readUnsignedShort(byte[] array, int offset) {
        // Mask to int first so sign extension of the bytes cannot corrupt the result.
        final int hi = array[offset] & 0xFF;
        final int lo = array[offset + 1] & 0xFF;
        // Combine with OR; the original's "+ 0" / "<< 0" were no-op noise.
        return (hi << 8) | lo;
    }
}
public class GrpSettings {

    /**
     * Sets the targetGender value for this GrpSettings.
     *
     * @param targetGender Specifies the target gender of the {@link LineItem}.
     *                     This field is only applicable if {@link #provider} is
     *                     not null.
     */
    public void setTargetGender(com.google.api.ads.admanager.axis.v201808.GrpTargetGender targetGender) {
        this.targetGender = targetGender;
    }
}
public class ApiRequestParams {

    /**
     * Convenient method to convert this typed request params into an untyped map.
     * The map is composed of {@code Map<String, Object>}, {@code List<Object>},
     * and basic Java data types.
     *
     * <p>An `EMPTY` {@link EnumParam} with a raw value of empty string is
     * converted to null by the deserializer, matching the untyped-params
     * contract of no empty strings; this makes the conversion lossy when
     * deserializing back.
     */
    public Map<String, Object> toMap() {
        // Serialize via GSON, then flatten the JSON tree into untyped
        // Map/List/primitive structures.
        JsonObject json = GSON.toJsonTree(this).getAsJsonObject();
        return UNTYPED_MAP_DESERIALIZER.deserialize(json);
    }
}
public class AbstractViewQuery {

    /**
     * Apply the standard Android fade-in animation to the current view.
     * Does nothing when no view is selected.
     *
     * @return this query object, for call chaining
     */
    public T fadeIn() {
        if (view != null) {
            view.startAnimation(AnimationUtils.loadAnimation(context, android.R.anim.fade_in));
        }
        return self();
    }
}
public class CNFEncoder { /** * Returns the current configuration of this encoder . If the encoder was constructed with a given configuration , this * configuration will always be used . Otherwise the current configuration of the formula factory is used or - if not * present - the default configuration . * @ return the current configuration of */ public CNFConfig config ( ) { } }
if ( this . config != null ) return this . config ; final Configuration cnfConfig = this . f . configurationFor ( ConfigurationType . CNF ) ; return cnfConfig != null ? ( CNFConfig ) cnfConfig : this . defaultConfig ;
public class SquigglyUtils {

    /**
     * Convert an object to a collection of maps.
     *
     * @param mapper the object mapper
     * @param source the source object
     * @param targetCollectionType the target collection type
     * @param targetKeyType the target map key type
     * @param targetValueType the target map value type
     * @return collection of maps keyed/valued per the target types
     */
    public static <K, V> Collection<Map<K, V>> collectify(ObjectMapper mapper, Object source, Class<? extends Collection> targetCollectionType, Class<K> targetKeyType, Class<V> targetValueType) {
        // Build the element type (Map<K, V>) and delegate to the MapType overload.
        MapType mapType = mapper.getTypeFactory().constructMapType(Map.class, targetKeyType, targetValueType);
        return collectify(mapper, convertToCollection(source), targetCollectionType, mapType);
    }
}
public class HttpResponse {

    /**
     * Write the HTTP status line and header fields to the given writer, then
     * advance the message state to SENDING.
     *
     * <p>For HTTP/0.9 requests (dot version &lt; 0) nothing is written, since
     * that protocol has no status line or headers.
     *
     * @param writer destination for the header bytes
     * @throws IOException if the writer fails
     * @throws IllegalStateException if the message is not EDITABLE or the
     *         response has been destroyed
     */
    public void writeHeader(Writer writer) throws IOException {
        if (_state != __MSG_EDITABLE)
            throw new IllegalStateException(__state[_state] + " is not EDITABLE");
        if (_header == null)
            throw new IllegalStateException("Response is destroyed");
        if (getHttpRequest().getDotVersion() >= 0) {
            // Mark BAD until the full header is flushed; a failure mid-write
            // leaves the message unusable.
            _state = __MSG_BAD;
            writer.write(_version);
            writer.write(' ');
            // Emit the 3-digit status code one character at a time;
            // Writer.write(int) writes a single char, and '0' + digit yields
            // the digit's character code.
            writer.write('0' + ((_status / 100) % 10));
            writer.write('0' + ((_status / 10) % 10));
            writer.write('0' + (_status % 10));
            writer.write(' ');
            writer.write(getReason());
            writer.write(HttpFields.__CRLF);
            _header.write(writer);
        }
        _state = __MSG_SENDING;
    }
}
public class DeleteNamedQueryRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param deleteNamedQueryRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller to write into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DeleteNamedQueryRequest deleteNamedQueryRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteNamedQueryRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deleteNamedQueryRequest.getNamedQueryId(), NAMEDQUERYID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MongoDBSchemaManager { /** * validate method validate schema and table for the list of tableInfos . * @ param tableInfos * list of TableInfos . */ protected void validate ( List < TableInfo > tableInfos ) { } }
db = mongo . getDB ( databaseName ) ; if ( db == null ) { logger . error ( "Database " + databaseName + "does not exist" ) ; throw new SchemaGenerationException ( "database " + databaseName + "does not exist" , "mongoDb" , databaseName ) ; } else { for ( TableInfo tableInfo : tableInfos ) { if ( tableInfo . getLobColumnInfo ( ) . isEmpty ( ) ) { if ( ! db . collectionExists ( tableInfo . getTableName ( ) ) ) { logger . error ( "Collection " + tableInfo . getTableName ( ) + "does not exist in db " + db . getName ( ) ) ; throw new SchemaGenerationException ( "Collection " + tableInfo . getTableName ( ) + " does not exist in db " + db . getName ( ) , "mongoDb" , databaseName , tableInfo . getTableName ( ) ) ; } } else { checkMultipleLobs ( tableInfo ) ; if ( ! db . collectionExists ( tableInfo . getTableName ( ) + MongoDBUtils . FILES ) ) { logger . error ( "Collection " + tableInfo . getTableName ( ) + MongoDBUtils . FILES + "does not exist in db " + db . getName ( ) ) ; throw new SchemaGenerationException ( "Collection " + tableInfo . getTableName ( ) + " does not exist in db " + db . getName ( ) , "mongoDb" , databaseName , tableInfo . getTableName ( ) ) ; } if ( ! db . collectionExists ( tableInfo . getTableName ( ) + MongoDBUtils . CHUNKS ) ) { logger . error ( "Collection " + tableInfo . getTableName ( ) + MongoDBUtils . CHUNKS + "does not exist in db " + db . getName ( ) ) ; throw new SchemaGenerationException ( "Collection " + tableInfo . getTableName ( ) + " does not exist in db " + db . getName ( ) , "mongoDb" , databaseName , tableInfo . getTableName ( ) ) ; } } } }
public class AbstractMergeRunnable {

    /**
     * Check if the data structure's in-memory format is appropriate to merge
     * with legacy policies.
     *
     * @param dataStructureName name of the data structure to check
     * @return {@code true} when the configured merge policy supports the
     *         structure's in-memory format
     */
    private boolean canMergeLegacy(String dataStructureName) {
        Object mergePolicy = getMergePolicy(dataStructureName);
        InMemoryFormat inMemoryFormat = getInMemoryFormat(dataStructureName);
        // 'false' disables the strict/failing mode of the support check here.
        return checkMergePolicySupportsInMemoryFormat(dataStructureName, mergePolicy, inMemoryFormat, false, logger);
    }
}
public class AudioSelector { /** * Selects a specific PID from within an audio source ( e . g . 257 selects PID 0x101 ) . * @ param pids * Selects a specific PID from within an audio source ( e . g . 257 selects PID 0x101 ) . */ public void setPids ( java . util . Collection < Integer > pids ) { } }
if ( pids == null ) { this . pids = null ; return ; } this . pids = new java . util . ArrayList < Integer > ( pids ) ;
public class GetApiKeysResult { /** * A list of warning messages logged during the import of API keys when the < code > failOnWarnings < / code > option is * set to true . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setWarnings ( java . util . Collection ) } or { @ link # withWarnings ( java . util . Collection ) } if you want to override * the existing values . * @ param warnings * A list of warning messages logged during the import of API keys when the < code > failOnWarnings < / code > * option is set to true . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetApiKeysResult withWarnings ( String ... warnings ) { } }
if ( this . warnings == null ) { setWarnings ( new java . util . ArrayList < String > ( warnings . length ) ) ; } for ( String ele : warnings ) { this . warnings . add ( ele ) ; } return this ;
public class KeyVaultClientBaseImpl {

    /**
     * Gets information about a SAS definition for the specified storage account.
     * This operation requires the storage/getsas permission.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @param storageAccountName The name of the storage account.
     * @param sasDefinitionName The name of the SAS definition.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<SasDefinitionBundle> getSasDefinitionAsync(String vaultBaseUrl, String storageAccountName, String sasDefinitionName, final ServiceCallback<SasDefinitionBundle> serviceCallback) {
        // Adapt the Observable-based overload to the callback-style ServiceFuture.
        return ServiceFuture.fromResponse(getSasDefinitionWithServiceResponseAsync(vaultBaseUrl, storageAccountName, sasDefinitionName), serviceCallback);
    }
}
public class VoiceApi {

    /**
     * Set the current agent's state to Ready on the voice channel.
     *
     * @param reasons Information on causes for, and results of, actions taken by
     *        the user of the current DN. See the Genesys Events and Models
     *        Reference Manual for details about reasons.
     * @param extensions Media device/hardware reason codes and similar
     *        information. See the Genesys Events and Models Reference Manual
     *        for details about extensions. (optional)
     * @throws WorkspaceApiException if the REST call fails or returns a non-OK response
     */
    public void setAgentReady(KeyValueCollection reasons, KeyValueCollection extensions) throws WorkspaceApiException {
        try {
            // Convert the KV collections into the transport representation.
            VoicereadyData readyData = new VoicereadyData();
            readyData.setReasons(Util.toKVList(reasons));
            readyData.setExtensions(Util.toKVList(extensions));
            ReadyData data = new ReadyData();
            data.data(readyData);
            ApiSuccessResponse response = this.voiceApi.setAgentStateReady(data);
            throwIfNotOk("setAgentReady", response);
        } catch (ApiException e) {
            // Preserve the transport failure as the cause.
            throw new WorkspaceApiException("setAgentReady failed.", e);
        }
    }
}
public class Webcam {

    /**
     * Get list of webcams to use. This method will wait predefined time interval
     * for webcam devices to be discovered. By default this time is set to 1 minute.
     *
     * @return List of webcams existing in the system
     * @throws WebcamException when something is wrong
     * @see Webcam#getWebcams(long, TimeUnit)
     */
    public static List<Webcam> getWebcams() throws WebcamException {
        // timeout exception below will never be caught since user would have to
        // wait around three hundreds billion years for it to occur
        try {
            return getWebcams(Long.MAX_VALUE);
        } catch (TimeoutException e) {
            // effectively unreachable; wrap to satisfy the checked signature
            throw new RuntimeException(e);
        }
    }
}
public class RequestUtils { /** * Reconstructs the request URL including query parameters . < em > Note : < / em > * the output of this method is purely for logging purposes only , thus POST * parameters are shown as if they were GET parameters and parameters are * < em > not < / em > URL encoded . */ public static String reconstructURL ( HttpServletRequest req ) { } }
StringBuffer buf = req . getRequestURL ( ) ; @ SuppressWarnings ( "unchecked" ) Map < String , String [ ] > map = req . getParameterMap ( ) ; if ( map . size ( ) > 0 ) { buf . append ( "?" ) ; for ( Map . Entry < String , String [ ] > entry : map . entrySet ( ) ) { if ( buf . charAt ( buf . length ( ) - 1 ) != '?' ) { buf . append ( "&" ) ; } buf . append ( entry . getKey ( ) ) . append ( "=" ) ; String [ ] values = entry . getValue ( ) ; if ( values . length == 1 ) { buf . append ( values [ 0 ] ) ; } else { buf . append ( "(" ) ; for ( int ii = 0 ; ii < values . length ; ii ++ ) { if ( ii > 0 ) { buf . append ( ", " ) ; } buf . append ( values [ ii ] ) ; } buf . append ( ")" ) ; } } } return buf . toString ( ) ;
public class SameDiff {

    /**
     * Get the variable name to use for resolving a given field for a given
     * function during import time. This method is used during
     * {@link DifferentialFunction#resolvePropertiesFromSameDiffBeforeExecution()}.
     *
     * @param function the function to get the variable name for
     * @param fieldName the field name to resolve for
     * @return the resolved variable name, or null when no mapping exists
     */
    public String getVarNameForFieldAndFunction(DifferentialFunction function, String fieldName) {
        // Two-key lookup: (function own name, field name) -> variable name.
        return fieldVariableResolutionMapping.get(function.getOwnName(), fieldName);
    }
}
public class WebUtils {

    /**
     * Put ticket granting ticket in request and flow scopes.
     *
     * @param context the request context
     * @param ticket  the ticket; may be null, in which case a null id is stored
     */
    public static void putTicketGrantingTicketInScopes(final RequestContext context, final TicketGrantingTicket ticket) {
        // Lombok 'val': inferred final local. Delegate with the ticket id only.
        val ticketValue = ticket != null ? ticket.getId() : null;
        putTicketGrantingTicketInScopes(context, ticketValue);
    }
}
public class VALPNormDistance { /** * Get the maximum distance . * @ param vec Approximation vector * @ return Maximum distance of the vector */ public double getMaxDist ( VectorApproximation vec ) { } }
final int dim = lookup . length ; double maxDist = 0 ; for ( int d = 0 ; d < dim ; d ++ ) { final int vp = vec . getApproximation ( d ) ; maxDist += getPartialMaxDist ( d , vp ) ; } return FastMath . pow ( maxDist , onebyp ) ;
public class DBWrapperFactory {

    /**
     * Create a wrapper around a collection of entities.
     *
     * @param collection           The collection to be wrapped.
     * @param entityClass          The class of the entity that the collection contains.
     * @param isRevisionCollection Whether or not the collection is a collection of revision entities.
     * @param handler              The handler to attach to the created wrapper.
     * @param <T> The wrapper class that is returned.
     * @return The Wrapper around the collection of entities, or null when the
     *         input collection is null.
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public <T extends BaseWrapper<T>, U> CollectionWrapper<T> createCollection(final Collection<U> collection, final Class<U> entityClass, boolean isRevisionCollection, final DBCollectionHandler<U> handler) {
        if (collection == null) {
            return null;
        }
        // Delegate to the handler-less overload, then attach the handler.
        final DBCollectionWrapper wrapper = (DBCollectionWrapper) createCollection(collection, entityClass, isRevisionCollection);
        wrapper.setHandler(handler);
        return wrapper;
    }
}
public class EncodingUtils { /** * Base64 - encode the given byte [ ] as a string . * @ param data the byte array to encode * @ param chunked the chunked * @ return the encoded string */ public static String encodeBase64 ( final byte [ ] data , final boolean chunked ) { } }
if ( data != null && data . length > 0 ) { if ( chunked ) { return BASE64_CHUNKED_ENCODER . encodeToString ( data ) . trim ( ) ; } return BASE64_UNCHUNKED_ENCODER . encodeToString ( data ) . trim ( ) ; } return StringUtils . EMPTY ;
public class RNAUtils { /** * method to get the trimmed nucleotide sequence * @ param polymer given rna / dna polymer * @ return trimmed nucleotide sequence * @ throws RNAUtilsException if the polymer is not a RNA / DNA * @ throws HELM2HandledException if it contains HELM2 specific features , so that it can not be casted to HELM1 Format * @ throws ChemistryException if chemistry engine can not be initialized */ public static String getTrimmedNucleotideSequence ( PolymerNotation polymer ) throws RNAUtilsException , HELM2HandledException , ChemistryException { } }
checkRNA ( polymer ) ; List < Nucleotide > list = getNucleotideList ( polymer ) ; int start = 0 ; Nucleotide na = list . get ( start ) ; while ( null == na . getBaseMonomer ( ) ) { start ++ ; na = list . get ( start ) ; } int end = list . size ( ) - 1 ; na = list . get ( end ) ; while ( null == na . getBaseMonomer ( ) ) { end -- ; na = list . get ( end ) ; } StringBuffer sb = new StringBuffer ( ) ; for ( int i = start ; i <= end ; i ++ ) { sb . append ( list . get ( i ) . getNaturalAnalog ( ) ) ; } return sb . toString ( ) ;
public class CouchbaseConnection {

    /**
     * Appends a {@link JsonDocument}'s content to an existing one.
     *
     * <p>NOTE(review): this performs an append followed by a separate get, so
     * the returned document may include interleaved concurrent writes — confirm
     * callers tolerate the non-atomicity.
     *
     * @param documentId the unique ID of the document
     * @param json the JSON String representing the document to append
     * @return the updated {@link Document}, re-read after the append
     * @see Bucket#append(Document)
     */
    public JsonDocument append(String documentId, String json) {
        this.bucket.append(JsonDocument.create(documentId, JsonObject.fromJson(json)));
        return this.get(documentId);
    }
}
public class CommerceSubscriptionEntryLocalServiceBaseImpl {

    /**
     * Deletes the commerce subscription entry with the primary key from the
     * database. Also notifies the appropriate model listeners.
     *
     * @param commerceSubscriptionEntryId the primary key of the commerce subscription entry
     * @return the commerce subscription entry that was removed
     * @throws PortalException if a commerce subscription entry with the primary key could not be found
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CommerceSubscriptionEntry deleteCommerceSubscriptionEntry(long commerceSubscriptionEntryId) throws PortalException {
        // Delegate to the persistence layer; @Indexable removes the search index entry.
        return commerceSubscriptionEntryPersistence.remove(commerceSubscriptionEntryId);
    }
}
public class LoginServlet {

    /**
     * GET simply returns login.html — unless Keycloak handles authentication,
     * in which case the client is redirected to the application root instead.
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        if (authConfiguration.isKeycloakEnabled()) {
            // Keycloak owns the login flow; send the user to the app root.
            redirector.doRedirect(request, response, "/");
        } else {
            // Server-side forward keeps the browser URL unchanged.
            redirector.doForward(request, response, "/login.html");
        }
    }
}
public class MarkLogicClient {

    /**
     * Setter for GraphPermissions.
     *
     * <p>A null argument resets the client to a fresh, empty permission set
     * rather than storing null.
     *
     * @param graphPerms the permissions to apply, or null to reset
     */
    public void setGraphPerms(GraphPermissions graphPerms) {
        if (graphPerms != null) {
            getClient().setGraphPerms(graphPerms);
        } else {
            // Replace with an empty permissions object from a new graph manager.
            getClient().setGraphPerms(getClient().getDatabaseClient().newGraphManager().newGraphPermissions());
        }
    }
}
public class BoundedLocalCache {

    /**
     * Returns the current value from a computeIfAbsent invocation.
     *
     * <p>Runs the mapping function inside {@code data.compute} so creation is
     * atomic per key. Live entries are returned untouched; collected or expired
     * entries are replaced in place. Single-element arrays carry values out of
     * the lambda; order of the post-compute bookkeeping is significant.
     */
    @Nullable V doComputeIfAbsent(K key, Object keyRef, Function<? super K, ? extends V> mappingFunction, long[] now, boolean recordStats) {
        // Out-parameters for the compute lambda (effectively-final holders).
        @SuppressWarnings("unchecked")
        V[] oldValue = (V[]) new Object[1];
        @SuppressWarnings("unchecked")
        V[] newValue = (V[]) new Object[1];
        @SuppressWarnings("unchecked")
        K[] nodeKey = (K[]) new Object[1];
        @SuppressWarnings({"unchecked", "rawtypes"})
        Node<K, V>[] removed = new Node[1];
        int[] weight = new int[2]; // old, new
        RemovalCause[] cause = new RemovalCause[1];
        Node<K, V> node = data.compute(keyRef, (k, n) -> {
            if (n == null) {
                // Absent: compute the value and create a fresh node.
                newValue[0] = mappingFunction.apply(key);
                if (newValue[0] == null) {
                    return null;
                }
                now[0] = expirationTicker().read();
                weight[1] = weigher.weigh(key, newValue[0]);
                n = nodeFactory.newNode(key, keyReferenceQueue(), newValue[0], valueReferenceQueue(), weight[1], now[0]);
                setVariableTime(n, expireAfterCreate(key, newValue[0], expiry(), now[0]));
                return n;
            }
            synchronized (n) {
                nodeKey[0] = n.getKey();
                weight[0] = n.getWeight();
                oldValue[0] = n.getValue();
                // Classify the existing entry: collected, expired, or live.
                if ((nodeKey[0] == null) || (oldValue[0] == null)) {
                    cause[0] = RemovalCause.COLLECTED;
                } else if (hasExpired(n, now[0])) {
                    cause[0] = RemovalCause.EXPIRED;
                } else {
                    return n; // live entry: reuse as-is
                }
                // Dead entry: notify the writer, then recompute in place.
                writer.delete(nodeKey[0], oldValue[0], cause[0]);
                newValue[0] = mappingFunction.apply(key);
                if (newValue[0] == null) {
                    removed[0] = n;
                    n.retire();
                    return null;
                }
                weight[1] = weigher.weigh(key, newValue[0]);
                n.setValue(newValue[0], valueReferenceQueue());
                n.setWeight(weight[1]);
                now[0] = expirationTicker().read();
                setVariableTime(n, expireAfterCreate(key, newValue[0], expiry(), now[0]));
                setAccessTime(n, now[0]);
                setWriteTime(n, now[0]);
                return n;
            }
        });
        if (node == null) {
            // Mapping returned null; schedule removal of a retired node, if any.
            if (removed[0] != null) {
                afterWrite(new RemovalTask(removed[0]));
            }
            return null;
        }
        if (cause[0] != null) {
            // A dead entry was replaced: emit removal notification and stats.
            if (hasRemovalListener()) {
                notifyRemoval(nodeKey[0], oldValue[0], cause[0]);
            }
            statsCounter().recordEviction(weight[0], cause[0]);
        }
        if (newValue[0] == null) {
            // Hit on a live entry: treat as a read.
            if (!isComputingAsync(node)) {
                tryExpireAfterRead(node, key, oldValue[0], expiry(), now[0]);
                setAccessTime(node, now[0]);
            }
            afterRead(node, now[0], /* recordHit */ recordStats);
            return oldValue[0];
        }
        if ((oldValue[0] == null) && (cause[0] == null)) {
            // Brand new entry.
            afterWrite(new AddTask(node, weight[1]));
        } else {
            // Replaced entry: account for the weight delta.
            int weightedDifference = (weight[1] - weight[0]);
            afterWrite(new UpdateTask(node, weightedDifference));
        }
        return newValue[0];
    }
}
public class HsqldbDatabase { /** * / * ( non - Javadoc ) * @ see org . parosproxy . paros . db . DatabaseIF # deleteSession ( java . lang . String ) */ @ Override public void deleteSession ( String sessionName ) { } }
super . deleteSession ( sessionName ) ; logger . debug ( "deleteSession " + sessionName ) ; deleteDbFile ( new File ( sessionName ) ) ; deleteDbFile ( new File ( sessionName + ".data" ) ) ; deleteDbFile ( new File ( sessionName + ".script" ) ) ; deleteDbFile ( new File ( sessionName + ".properties" ) ) ; deleteDbFile ( new File ( sessionName + ".backup" ) ) ; deleteDbFile ( new File ( sessionName + ".lobs" ) ) ;
public class TrailingHeaders { /** * Adds an HTTP trailing header with the passed { @ code name } and { @ code values } to this request . * @ param name Name of the header . * @ param values Values for the header . * @ return { @ code this } . */ public TrailingHeaders addHeader ( CharSequence name , Iterable < Object > values ) { } }
lastHttpContent . trailingHeaders ( ) . add ( name , values ) ; return this ;
public class DescribeReplicationInstanceTaskLogsResult { /** * An array of replication task log metadata . Each member of the array contains the replication task name , ARN , and * task log size ( in bytes ) . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setReplicationInstanceTaskLogs ( java . util . Collection ) } or * { @ link # withReplicationInstanceTaskLogs ( java . util . Collection ) } if you want to override the existing values . * @ param replicationInstanceTaskLogs * An array of replication task log metadata . Each member of the array contains the replication task name , * ARN , and task log size ( in bytes ) . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeReplicationInstanceTaskLogsResult withReplicationInstanceTaskLogs ( ReplicationInstanceTaskLog ... replicationInstanceTaskLogs ) { } }
if ( this . replicationInstanceTaskLogs == null ) { setReplicationInstanceTaskLogs ( new java . util . ArrayList < ReplicationInstanceTaskLog > ( replicationInstanceTaskLogs . length ) ) ; } for ( ReplicationInstanceTaskLog ele : replicationInstanceTaskLogs ) { this . replicationInstanceTaskLogs . add ( ele ) ; } return this ;
public class LepInterceptor { /** * { @ inheritDoc } */ @ Override public boolean preHandle ( HttpServletRequest request , HttpServletResponse response , Object handler ) throws IOException { } }
lepManager . beginThreadContext ( scopedContext -> { scopedContext . setValue ( THREAD_CONTEXT_KEY_TENANT_CONTEXT , tenantContextHolder . getContext ( ) ) ; scopedContext . setValue ( BINDING_KEY_AUTH_CONTEXT , xmAuthContextHolder . getContext ( ) ) ; } ) ; return true ;
public class AddressTemplate {

    /**
     * Resolves this address template against the specified statement context.
     *
     * @param context   the statement context
     * @param wildcards an optional list of wildcards which are used to resolve any wildcards in
     *                  this address template
     * @return a fully qualified resource address which might be empty, but which does not contain
     *         any tokens
     */
    public ResourceAddress resolve(StatementContext context, String... wildcards) {
        int wildcardCount = 0;
        ModelNode model = new ModelNode();
        // Memories hand out successive resolved values, so repeated references to the same
        // context key within one template consume consecutive values.
        Memory<String[]> tupleMemory = new Memory<>();
        Memory<String> valueMemory = new Memory<>();
        for (Token token : tokens) {
            if (!token.hasKey()) {
                // a single token or token expression
                String tokenRef = token.getValue();
                String[] resolvedValue;
                if (tokenRef.startsWith("{")) {
                    // strip the surrounding braces, then resolve the key/value tuple from the context
                    tokenRef = tokenRef.substring(1, tokenRef.length() - 1);
                    if (!tupleMemory.contains(tokenRef)) {
                        tupleMemory.memorize(tokenRef, context.collectTuples(tokenRef));
                    }
                    resolvedValue = tupleMemory.next(tokenRef);
                } else {
                    assert tokenRef.contains("=") : "Invalid token expression " + tokenRef;
                    resolvedValue = tokenRef.split("=");
                }
                if (resolvedValue == null) {
                    // NOTE(review): debug print in library code — consider routing through a logger.
                    System.out.println("Suppress token expression '" + tokenRef + "'. It cannot be resolved");
                } else {
                    model.add(resolvedValue[0], resolvedValue[1]);
                }
            } else {
                // a value expression. key and value of the expression might be resolved
                String keyRef = token.getKey();
                String valueRef = token.getValue();
                String resolvedKey;
                String resolvedValue;
                if (keyRef.startsWith("{")) {
                    keyRef = keyRef.substring(1, keyRef.length() - 1);
                    if (!valueMemory.contains(keyRef)) {
                        valueMemory.memorize(keyRef, context.collect(keyRef));
                    }
                    resolvedKey = valueMemory.next(keyRef);
                } else {
                    resolvedKey = keyRef;
                }
                if (valueRef.startsWith("{")) {
                    valueRef = valueRef.substring(1, valueRef.length() - 1);
                    if (!valueMemory.contains(valueRef)) {
                        valueMemory.memorize(valueRef, context.collect(valueRef));
                    }
                    resolvedValue = valueMemory.next(valueRef);
                } else {
                    resolvedValue = valueRef;
                }
                // Unresolved parts degrade to a placeholder instead of failing the whole address.
                if (resolvedKey == null) resolvedKey = "_blank";
                if (resolvedValue == null) resolvedValue = "_blank";
                // wildcards: a "*" value is substituted with the next supplied wildcard, if any.
                String addressValue = resolvedValue;
                if ("*".equals(resolvedValue) && wildcards != null && wildcards.length > 0
                        && wildcardCount < wildcards.length) {
                    addressValue = wildcards[wildcardCount];
                    wildcardCount++;
                }
                model.add(resolvedKey, addressValue);
            }
        }
        return new ResourceAddress(model);
    }
}
public class NetworkWatchersInner {

    /**
     * Queries status of flow log on a specified resource.
     *
     * @param resourceGroupName  The name of the network watcher resource group.
     * @param networkWatcherName The name of the network watcher resource.
     * @param targetResourceId   The target resource where getting the flow logging status.
     * @param serviceCallback    the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<FlowLogInformationInner> beginGetFlowLogStatusAsync(String resourceGroupName, String networkWatcherName, String targetResourceId, final ServiceCallback<FlowLogInformationInner> serviceCallback) {
        // Adapt the Observable-based overload to a ServiceFuture wired to the callback.
        return ServiceFuture.fromResponse(beginGetFlowLogStatusWithServiceResponseAsync(resourceGroupName, networkWatcherName, targetResourceId), serviceCallback);
    }
}
public class SimpleClient {

    /**
     * {@inheritDoc}
     * <p>
     * Delegates to the Interest-based overload using a default Interest built for {@code name}.
     */
    @Override
    public CompletableFuture<Data> getAsync(Face face, Name name) {
        return getAsync(face, getDefaultInterest(name));
    }
}
public class StatementDMQL { /** * Returns the metadata , which is empty if the CompiledStatement does not * generate a Result . */ @ Override public ResultMetaData getResultMetaData ( ) { } }
switch ( type ) { case StatementTypes . DELETE_WHERE : case StatementTypes . INSERT : case StatementTypes . UPDATE_WHERE : case StatementTypes . MIGRATE_WHERE : return ResultMetaData . emptyResultMetaData ; default : throw Error . runtimeError ( ErrorCode . U_S0500 , "CompiledStatement.getResultMetaData()" ) ; }
public class ConfigurationLoader { /** * This method loads the configuration from the supplied URI . * @ param uri The URI * @ param type The type , or null if default ( jvm ) * @ return The configuration */ protected static CollectorConfiguration loadConfig ( String uri , String type ) { } }
final CollectorConfiguration config = new CollectorConfiguration ( ) ; if ( type == null ) { type = DEFAULT_TYPE ; } uri += java . io . File . separator + type ; File f = new File ( uri ) ; if ( ! f . isAbsolute ( ) ) { if ( f . exists ( ) ) { uri = f . getAbsolutePath ( ) ; } else if ( System . getProperties ( ) . containsKey ( "jboss.server.config.dir" ) ) { uri = System . getProperty ( "jboss.server.config.dir" ) + java . io . File . separatorChar + uri ; } else { try { URL url = Thread . currentThread ( ) . getContextClassLoader ( ) . getResource ( uri ) ; if ( url != null ) { uri = url . getPath ( ) ; } else { log . severe ( "Failed to get absolute path for uri '" + uri + "'" ) ; } } catch ( Exception e ) { log . log ( Level . SEVERE , "Failed to get absolute path for uri '" + uri + "'" , e ) ; uri = null ; } } } if ( uri != null ) { String [ ] uriParts = uri . split ( Matcher . quoteReplacement ( File . separator ) ) ; int startIndex = 0 ; // Remove any file prefix if ( uriParts [ 0 ] . equals ( "file:" ) ) { startIndex ++ ; } try { Path path = getPath ( startIndex , uriParts ) ; Files . walkFileTree ( path , new FileVisitor < Path > ( ) { @ Override public FileVisitResult postVisitDirectory ( Path path , IOException exc ) throws IOException { return FileVisitResult . CONTINUE ; } @ Override public FileVisitResult preVisitDirectory ( Path path , BasicFileAttributes attrs ) throws IOException { return FileVisitResult . CONTINUE ; } @ Override public FileVisitResult visitFile ( Path path , BasicFileAttributes attrs ) throws IOException { if ( path . toString ( ) . endsWith ( ".json" ) ) { String json = new String ( Files . readAllBytes ( path ) ) ; CollectorConfiguration childConfig = mapper . readValue ( json , CollectorConfiguration . class ) ; if ( childConfig != null ) { config . merge ( childConfig , false ) ; } } return FileVisitResult . 
CONTINUE ; } @ Override public FileVisitResult visitFileFailed ( Path path , IOException exc ) throws IOException { return FileVisitResult . CONTINUE ; } } ) ; } catch ( Throwable e ) { log . log ( Level . SEVERE , "Failed to load configuration" , e ) ; } } return config ;
public class StringValue { /** * ( non - Javadoc ) * @ see java . lang . Appendable # append ( java . lang . CharSequence , int , int ) */ public Appendable append ( StringValue csq , int start , int end ) { } }
final int otherLen = end - start ; grow ( this . len + otherLen ) ; System . arraycopy ( csq . value , start , this . value , this . len , otherLen ) ; this . len += otherLen ; return this ;
public class PatternsImpl {

    /**
     * Adds a batch of patterns to the specified application.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param patterns A JSON array containing patterns.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;PatternRuleInfo&gt; object if successful.
     */
    public List<PatternRuleInfo> batchAddPatterns(UUID appId, String versionId, List<PatternRuleCreateObject> patterns) {
        // Synchronous facade: block on the async call and unwrap the response body.
        return batchAddPatternsWithServiceResponseAsync(appId, versionId, patterns).toBlocking().single().body();
    }
}
public class HomekitSRP6ServerSession { /** * Increments this SRP - 6a authentication session to { @ link State # STEP _ 1 } . * < p > Argument origin : * < ul > * < li > From client : user identity ' I ' . * < li > From server database : matching salt ' s ' and password verifier ' v ' values . * < / ul > * @ param userID The identity ' I ' of the authenticating user . Must not be { @ code null } or empty . * @ param s The password salt ' s ' . Must not be { @ code null } . * @ param v The password verifier ' v ' . Must not be { @ code null } . * @ return The server public value ' B ' . * @ throws IllegalStateException If the mehod is invoked in a state other than { @ link State # INIT } . */ public BigInteger step1 ( final String userID , final BigInteger s , final BigInteger v ) { } }
// Check arguments if ( userID == null || userID . trim ( ) . isEmpty ( ) ) throw new IllegalArgumentException ( "The user identity 'I' must not be null or empty" ) ; this . userID = userID ; if ( s == null ) throw new IllegalArgumentException ( "The salt 's' must not be null" ) ; this . s = s ; if ( v == null ) throw new IllegalArgumentException ( "The verifier 'v' must not be null" ) ; this . v = v ; // Check current state if ( state != State . INIT ) throw new IllegalStateException ( "State violation: Session must be in INIT state" ) ; // Generate server private and public values k = SRP6Routines . computeK ( digest , config . N , config . g ) ; digest . reset ( ) ; b = HomekitSRP6Routines . generatePrivateValue ( config . N , random ) ; digest . reset ( ) ; B = SRP6Routines . computePublicServerValue ( config . N , config . g , k , v , b ) ; state = State . STEP_1 ; updateLastActivityTime ( ) ; return B ;
public class ConnectionPoolConnection {

    /**
     * Attempts to resolve transactions left incomplete by a closing statement, applying the
     * configured {@code incompleteTransactionPolicy} (report / commit / rollback).
     *
     * @throws SQLException if the policy is REPORT and an incomplete transaction is detected,
     *         or if the commit/rollback itself fails
     */
    private void resolveIncompleteTransactions() throws SQLException {
        switch (transactionState) {
        case COMPLETED:
            // All we know for certain is that at least one commit/rollback was called. Do nothing.
            break;
        case STARTED:
            // At least one statement was created with auto-commit false & no commit/rollback.
            // Follow the default policy.
            if (conn != null && openStatements.size() > 0) {
                switch (incompleteTransactionPolicy) {
                case REPORT:
                    throw new SQLException("Statement closed with incomplete transaction",
                            JDBConnection.SQLSTATE_INVALID_TRANSACTION_STATE);
                case COMMIT:
                    if (!conn.isClosed()) {
                        conn.commit();
                    }
                    break;
                case ROLLBACK:
                    // Last case of the inner switch: no break needed.
                    if (!conn.isClosed()) {
                        conn.rollback();
                    }
                }
            }
            break;
        }
    }
}
public class Record {

    /**
     * Sets the field at the given position to the given value, expanding the record if the
     * position lies beyond the current number of fields.
     * <p>
     * The value is kept as a reference until the binary representation is synchronized (at the
     * latest when the record is emitted), so modifications to the value object until then are
     * reflected in the record.
     *
     * @param fieldNum The position of the field, starting at zero.
     * @param value    The new value.
     */
    public void setField(int fieldNum, Value value) {
        // Guard: negative positions are invalid.
        if (fieldNum < 0) {
            throw new IndexOutOfBoundsException();
        }
        // Writing past the last field grows the record to fit.
        if (fieldNum >= this.numFields) {
            setNumFields(fieldNum + 1);
        }
        internallySetField(fieldNum, value);
    }
}
public class AlluxioWorkerMonitor { /** * Starts the Alluxio worker monitor . * @ param args command line arguments , should be empty */ public static void main ( String [ ] args ) { } }
if ( args . length != 0 ) { LOG . info ( "java -cp {} {}" , RuntimeConstants . ALLUXIO_JAR , AlluxioWorkerMonitor . class . getCanonicalName ( ) ) ; LOG . warn ( "ignoring arguments" ) ; } AlluxioConfiguration conf = new InstancedConfiguration ( ConfigurationUtils . defaults ( ) ) ; HealthCheckClient client = new WorkerHealthCheckClient ( NetworkAddressUtils . getConnectAddress ( NetworkAddressUtils . ServiceType . WORKER_RPC , conf ) , ONE_MIN_EXP_BACKOFF , conf ) ; if ( ! client . isServing ( ) ) { System . exit ( 1 ) ; } System . exit ( 0 ) ;
public class LocalDate { /** * Constructs a LocalDate from a < code > java . util . Date < / code > * using exactly the same field values . * Each field is queried from the Date and assigned to the LocalDate . * This is useful if you have been using the Date as a local date , * ignoring the zone . * One advantage of this method is that this method is unaffected if the * version of the time zone data differs between the JDK and Joda - Time . * That is because the local field values are transferred , calculated using * the JDK time zone data and without using the Joda - Time time zone data . * This factory method always creates a LocalDate with ISO chronology . * @ param date the Date to extract fields from , not null * @ return the created local date , not null * @ throws IllegalArgumentException if the calendar is null * @ throws IllegalArgumentException if the date is invalid for the ISO chronology */ @ SuppressWarnings ( "deprecation" ) public static LocalDate fromDateFields ( Date date ) { } }
if ( date == null ) { throw new IllegalArgumentException ( "The date must not be null" ) ; } if ( date . getTime ( ) < 0 ) { // handle years in era BC GregorianCalendar cal = new GregorianCalendar ( ) ; cal . setTime ( date ) ; return fromCalendarFields ( cal ) ; } return new LocalDate ( date . getYear ( ) + 1900 , date . getMonth ( ) + 1 , date . getDate ( ) ) ;
public class SFSUtilities { /** * Compute the full extend of a ResultSet using the first geometry field . If * the ResultSet does not contain any geometry field throw an exception * @ param resultSet ResultSet to analyse * @ return The full envelope of the ResultSet * @ throws SQLException */ public static Envelope getResultSetEnvelope ( ResultSet resultSet ) throws SQLException { } }
List < String > geometryFields = getGeometryFields ( resultSet ) ; if ( geometryFields . isEmpty ( ) ) { throw new SQLException ( "This ResultSet doesn't contain any geometry field." ) ; } else { return getResultSetEnvelope ( resultSet , geometryFields . get ( 0 ) ) ; }
public class TableBuilder { /** * add new tr having td tags that has content of each value of list . * The new tr tag have specifed attributes . * @ param tdList * @ param attrMap attributes for new tr tag * @ return * @ throws TagTypeUnmatchException */ @ SuppressWarnings ( "unchecked" ) public < T extends AbstractJaxb > TableBuilder addTr ( List < Object > tdList , Map < String , Object > attrMap ) throws TagTypeUnmatchException { } }
tr tr = new tr ( ) ; tr . setAttr ( attrMap ) ; for ( Object obj : tdList ) { if ( obj instanceof String ) { tr . addTd ( ( String ) obj ) ; } else if ( obj instanceof AbstractJaxb ) { tr . addTd ( ( T ) obj ) ; } else { throw new TagTypeUnmatchException ( "String or other tag object expected but tdList contains " + obj . getClass ( ) . getName ( ) ) ; } } trList . add ( tr ) ; return this ;
public class BasicWebAppActor {

    /**
     * Replies to a request caught by the interceptor. Note this is server dependent and bound
     * to undertow; for servlet containers, just override KontraktorServlet methods.
     * <p>
     * NOTE(review): the status code is always 200, even when the promise resolves with an error
     * (the stringified error is then sent as the body) — confirm this is intended.
     *
     * @param exchange the undertow exchange to answer
     */
    public void handleDirectRequest(HttpServerExchange exchange) {
        Log.Info(this, "direct request received " + exchange);
        // Resolve the response body asynchronously from the request path, then write it out.
        getDirectRequestResponse(exchange.getRequestPath()).then((s, err) -> {
            exchange.setResponseCode(200);
            exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, "text/html; charset=utf-8");
            // On failure s is null; fall back to the stringified error as the body.
            exchange.getResponseSender().send(s == null ? "" + err : s);
        });
    }
}
public class Campaign {

    /**
     * Gets the selectiveOptimization value for this Campaign: the selective optimization setting,
     * which includes a set of conversion types to optimize this campaign towards.
     *
     * @return the selectiveOptimization field value
     */
    public com.google.api.ads.adwords.axis.v201809.cm.SelectiveOptimization getSelectiveOptimization() {
        return selectiveOptimization;
    }
}
public class JCalendar { /** * Creates a JFrame with a JCalendar inside and can be used for testing . * @ param s * The command line arguments */ public static void main ( String [ ] s ) { } }
JFrame frame = new JFrame ( "JCalendar" ) ; JCalendar jcalendar = new JCalendar ( ) ; frame . getContentPane ( ) . add ( jcalendar ) ; frame . pack ( ) ; frame . setVisible ( true ) ;
public class FileUtil {

    /**
     * Deletes a directory and all of its sub-directories and files.
     *
     * @param dir the directory to delete; must exist and be a directory
     * @throws IOException if the deletion fails
     * @throws IllegalArgumentException if {@code dir} does not exist or is not a directory
     */
    public static void deleteDir(File dir) throws IOException {
        // Fail fast when the path is missing or not a directory.
        Validate.isTrue(isDirExists(dir), "%s is not exist or not a dir", dir);
        // Delegate the recursive removal to the Path-based overload.
        deleteDir(dir.toPath());
    }
}
public class DataUnformatFilter {

    /**
     * Filters a processing instruction event.
     *
     * @param target The PI target.
     * @param data   The PI data.
     * @exception org.xml.sax.SAXException If a filter further down the chain raises an exception.
     * @see org.xml.sax.ContentHandler#processingInstruction
     */
    public void processingInstruction(String target, String data) throws SAXException {
        // Flush any pending whitespace before forwarding the PI down the filter chain.
        emitWhitespace();
        super.processingInstruction(target, data);
    }
}
public class AttachmentManager { /** * Download each attachment in { @ code attachments } to a temporary location , and * return a list of attachments suitable for passing to { @ code setAttachments } . * Typically { @ code attachments } is found via a call to { @ link # findNewAttachments } . * @ param attachments Map of attachments to prepare . * @ return Map of attachments prepared for inserting into attachment store . * @ see # findNewAttachments */ public static Map < String , PreparedAttachment > prepareAttachments ( String attachmentsDir , AttachmentStreamFactory attachmentStreamFactory , Map < String , Attachment > attachments ) throws AttachmentException { } }
Map < String , PreparedAttachment > preparedAttachments = new HashMap < String , PreparedAttachment > ( ) ; for ( Map . Entry < String , Attachment > a : attachments . entrySet ( ) ) { PreparedAttachment pa = AttachmentManager . prepareAttachment ( attachmentsDir , attachmentStreamFactory , a . getValue ( ) ) ; preparedAttachments . put ( a . getKey ( ) , pa ) ; } return preparedAttachments ;
public class HibernateSession {

    /**
     * Deletes all objects matching the given criteria via a bulk JPA-style DELETE query.
     *
     * @param criteria The criteria
     * @return The total number of records deleted
     */
    public long deleteAll(final QueryableCriteria criteria) {
        return getHibernateTemplate().execute((GrailsHibernateTemplate.HibernateCallback<Integer>) session -> {
            // Translate the criteria into a bulk DELETE query compatible with Hibernate.
            JpaQueryBuilder builder = new JpaQueryBuilder(criteria);
            builder.setConversionService(getMappingContext().getConversionService());
            builder.setHibernateCompatible(true);
            JpaQueryInfo jpaQueryInfo = builder.buildDelete();
            org.hibernate.query.Query query = session.createQuery(jpaQueryInfo.getQuery());
            getHibernateTemplate().applySettings(query);
            // Bind parameters using the builder's 1-based name prefix scheme.
            List parameters = jpaQueryInfo.getParameters();
            if (parameters != null) {
                for (int i = 0, count = parameters.size(); i < count; i++) {
                    query.setParameter(JpaQueryBuilder.PARAMETER_NAME_PREFIX + (i + 1), parameters.get(i));
                }
            }
            // Publish pre/post query events around the bulk update so listeners observe it.
            HibernateHqlQuery hqlQuery = new HibernateHqlQuery(HibernateSession.this, criteria.getPersistentEntity(), query);
            ApplicationEventPublisher applicationEventPublisher = datastore.getApplicationEventPublisher();
            applicationEventPublisher.publishEvent(new PreQueryEvent(datastore, hqlQuery));
            int result = query.executeUpdate();
            applicationEventPublisher.publishEvent(new PostQueryEvent(datastore, hqlQuery, Collections.singletonList(result)));
            return result;
        });
    }
}
public class DocCommentLexer { /** * Returns the character at position < tt > pos < / tt > from the * matched text . * It is equivalent to yytext ( ) . charAt ( pos ) , but faster * @ param pos the position of the character to fetch . * A value from 0 to yylength ( ) - 1. * @ return the character at position pos */ public final char yycharat ( int pos ) { } }
return zzBufferArray != null ? zzBufferArray [ zzStartRead + pos ] : zzBuffer . charAt ( zzStartRead + pos ) ;
public class DebugAndFilterModule { /** * Initialize reusable filters . */ private void initFilters ( ) { } }
ditaWriterFilter = new DitaWriterFilter ( ) ; ditaWriterFilter . setLogger ( logger ) ; ditaWriterFilter . setJob ( job ) ; ditaWriterFilter . setEntityResolver ( reader . getEntityResolver ( ) ) ; topicFragmentFilter = new TopicFragmentFilter ( ATTRIBUTE_NAME_CONREF , ATTRIBUTE_NAME_CONREFEND ) ; tempFileNameScheme . setBaseDir ( job . getInputDir ( ) ) ;
public class DumpServicesHandler { /** * { @ inheritDoc } */ @ Override public void execute ( OperationContext context , ModelNode operation ) throws OperationFailedException { } }
final ServiceName serviceName ; if ( context . getProcessType ( ) . isServer ( ) ) { serviceName = Services . JBOSS_AS ; } else { // The HC / DC service name serviceName = ServiceName . JBOSS . append ( "host" , "controller" ) ; } context . addStep ( new OperationStepHandler ( ) { @ Override public void execute ( OperationContext context , ModelNode operation ) throws OperationFailedException { ServiceController < ? > service = context . getServiceRegistry ( false ) . getRequiredService ( serviceName ) ; ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; PrintStream print = new PrintStream ( out ) ; service . getServiceContainer ( ) . dumpServices ( print ) ; print . flush ( ) ; context . getResult ( ) . set ( new String ( out . toByteArray ( ) , StandardCharsets . UTF_8 ) ) ; } } , OperationContext . Stage . RUNTIME ) ;
public class Constraint { /** * The ' min ' constraint - field must contain a number larger than or equal to min . * @ param min the minimum value * @ return constraint */ public static Constraint min ( final Number min ) { } }
return new Constraint ( "min" , minPayload ( min ) ) { public boolean isValid ( Object actualValue ) { return actualValue == null || ( actualValue instanceof Number && min != null && min . longValue ( ) <= ( ( Number ) actualValue ) . longValue ( ) ) ; } } ;