signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class GrailsParameterMap {

    /**
     * Obtains a date for the parameter name using the default format.
     *
     * @param name The name of the parameter
     * @return A date or null
     */
    @Override
    public Date getDate(String name) {
        Object returnValue = wrappedMap.get(name);
        // "date.struct" marks a multi-field date submission whose value must be
        // assembled lazily from its component request parameters.
        if ("date.struct".equals(returnValue)) {
            returnValue = lazyEvaluateDateParam(name);
            // Cache the assembled date so later lookups skip re-evaluation.
            nestedDateMap.put(name, returnValue);
            return (Date) returnValue;
        }
        Date date = super.getDate(name);
        if (date == null) {
            // try lookup format from messages.properties
            String format = lookupFormat(name);
            if (format != null) {
                return getDate(name, format);
            }
        }
        return date;
    }
}
|
public class CmsDefaultXmlContentHandler { /** * Adds the given element to the compact view set . < p >
* @ param contentDefinition the XML content definition this XML content handler belongs to
* @ param elementName the element name
* @ param displayType the display type to use for the element widget
* @ throws CmsXmlException in case an unknown element name is used */
protected void addDisplayType ( CmsXmlContentDefinition contentDefinition , String elementName , DisplayType displayType ) throws CmsXmlException { } }
|
if ( contentDefinition . getSchemaType ( elementName ) == null ) { throw new CmsXmlException ( Messages . get ( ) . container ( Messages . ERR_XMLCONTENT_CONFIG_ELEM_UNKNOWN_1 , elementName ) ) ; } m_displayTypes . put ( elementName , displayType ) ;
|
public class AWSDirectoryServiceClient { /** * Removes IP address blocks from a directory .
* @ param removeIpRoutesRequest
* @ return Result of the RemoveIpRoutes operation returned by the service .
* @ throws EntityDoesNotExistException
* The specified entity could not be found .
* @ throws InvalidParameterException
* One or more parameters are not valid .
* @ throws DirectoryUnavailableException
* The specified directory is unavailable or could not be found .
* @ throws ClientException
* A client exception has occurred .
* @ throws ServiceException
* An exception has occurred in AWS Directory Service .
* @ sample AWSDirectoryService . RemoveIpRoutes
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ds - 2015-04-16 / RemoveIpRoutes " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public RemoveIpRoutesResult removeIpRoutes ( RemoveIpRoutesRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeRemoveIpRoutes ( request ) ;
|
public class DescribeAddressesRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < DescribeAddressesRequest > getDryRunRequest ( ) { } }
|
Request < DescribeAddressesRequest > request = new DescribeAddressesRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
|
public class StreamEx { /** * Returns a sequential , ordered { @ link StreamEx } created from given
* { @ link Enumeration } .
* Use this method only if you cannot provide better Stream source ( like
* { @ code Collection } or { @ code Spliterator } ) .
* @ param < T > the type of enumeration elements
* @ param enumeration an enumeration to create the stream from .
* @ return the new stream
* @ since 0.5.1 */
public static < T > StreamEx < T > of ( Enumeration < ? extends T > enumeration ) { } }
|
return of ( new Iterator < T > ( ) { @ Override public boolean hasNext ( ) { return enumeration . hasMoreElements ( ) ; } @ Override public T next ( ) { return enumeration . nextElement ( ) ; } } ) ;
|
public class Polynomial {

    /**
     * Checks to see if the coefficients of two polynomials are identical to
     * within tolerance. If the lengths of the polynomials are not the same then
     * the extra coefficients in the longer polynomial must be within tolerance
     * of zero.
     *
     * @param p Polynomial that this polynomial is being compared against.
     * @param tol Similarity tolerance. Try 1e-15 for high confidence.
     * @return true if the two polynomials are identical to within tolerance.
     */
    public boolean isIdentical(Polynomial p, double tol) {
        int m = Math.max(p.size(), size());
        // make sure trailing coefficients are close to zero
        // NOTE(review): the loops mix the 'size' field with the size() method —
        // assumed equivalent here; confirm against the rest of the class.
        for (int i = p.size; i < m; i++) {
            if (Math.abs(c[i]) > tol) {
                return false;
            }
        }
        for (int i = size; i < m; i++) {
            if (Math.abs(p.c[i]) > tol) {
                return false;
            }
        }
        // ensure that the rest of the coefficients are close
        int n = Math.min(p.size(), size());
        for (int i = 0; i < n; i++) {
            if (Math.abs(c[i] - p.c[i]) > tol) {
                return false;
            }
        }
        return true;
    }
}
|
public class AbstractOrientedBox3F {

    /**
     * Compute intersection between an OBB and a capsule.
     *
     * @param centerx x coordinate of the center point of the oriented box.
     * @param centery y coordinate of the center point of the oriented box.
     * @param centerz z coordinate of the center point of the oriented box.
     * @param axis1x x component of the first unit axis of the oriented box.
     * @param axis1y y component of the first unit axis of the oriented box.
     * @param axis1z z component of the first unit axis of the oriented box.
     * @param axis2x x component of the second unit axis of the oriented box.
     * @param axis2y y component of the second unit axis of the oriented box.
     * @param axis2z z component of the second unit axis of the oriented box.
     * @param axis3x x component of the third unit axis of the oriented box.
     * @param axis3y y component of the third unit axis of the oriented box.
     * @param axis3z z component of the third unit axis of the oriented box.
     * @param extentAxis1 extent of the oriented box along its first axis.
     * @param extentAxis2 extent of the oriented box along its second axis.
     * @param extentAxis3 extent of the oriented box along its third axis.
     * @param capsule1Ax x coordinate of the first point of the capsule medial line.
     * @param capsule1Ay y coordinate of the first point of the capsule medial line.
     * @param capsule1Az z coordinate of the first point of the capsule medial line.
     * @param capsule1Bx x coordinate of the second point of the capsule medial line.
     * @param capsule1By y coordinate of the second point of the capsule medial line.
     * @param capsule1Bz z coordinate of the second point of the capsule medial line.
     * @param capsule1Radius capsule radius.
     * @return <code>true</code> if intersecting, otherwise <code>false</code>
     */
    @Pure
    public static boolean intersectsOrientedBoxCapsule(double centerx, double centery, double centerz, double axis1x, double axis1y, double axis1z, double axis2x, double axis2y, double axis2z, double axis3x, double axis3y, double axis3z, double extentAxis1, double extentAxis2, double extentAxis3, double capsule1Ax, double capsule1Ay, double capsule1Az, double capsule1Bx, double capsule1By, double capsule1Bz, double capsule1Radius) {
        // Closest point on the OBB to each endpoint of the capsule's medial segment.
        Point3f closestFromA = new Point3f();
        Point3f closestFromB = new Point3f();
        computeClosestFarestOBBPoints(capsule1Ax, capsule1Ay, capsule1Az, centerx, centery, centerz, axis1x, axis1y, axis1z, axis2x, axis2y, axis2z, axis3x, axis3y, axis3z, extentAxis1, extentAxis2, extentAxis3, closestFromA, null);
        computeClosestFarestOBBPoints(capsule1Bx, capsule1By, capsule1Bz, centerx, centery, centerz, axis1x, axis1y, axis1z, axis2x, axis2y, axis2z, axis3x, axis3y, axis3z, extentAxis1, extentAxis2, extentAxis3, closestFromB, null);
        // Squared distance between the capsule segment and the segment joining
        // the two closest OBB points; compare against the squared radius to
        // avoid a square root.
        double distance = AbstractSegment3F.distanceSquaredSegmentSegment(capsule1Ax, capsule1Ay, capsule1Az, capsule1Bx, capsule1By, capsule1Bz, closestFromA.getX(), closestFromA.getY(), closestFromA.getZ(), closestFromB.getX(), closestFromB.getY(), closestFromB.getZ());
        return (distance <= (capsule1Radius * capsule1Radius));
    }
}
|
public class ScriptRuntime {

    /**
     * The eval function property of the global object.
     * See ECMA 15.1.2.1.
     */
    public static Object evalSpecial(Context cx, Scriptable scope, Object thisArg, Object[] args, String filename, int lineNumber) {
        if (args.length < 1)
            return Undefined.instance;
        Object x = args[0];
        if (!(x instanceof CharSequence)) {
            // Non-string argument: strict modes reject it; otherwise it is
            // returned unchanged with a warning.
            if (cx.hasFeature(Context.FEATURE_STRICT_MODE) || cx.hasFeature(Context.FEATURE_STRICT_EVAL)) {
                throw Context.reportRuntimeError0("msg.eval.nonstring.strict");
            }
            String message = ScriptRuntime.getMessage0("msg.eval.nonstring");
            Context.reportWarning(message);
            return x;
        }
        if (filename == null) {
            // Derive the source location from the current stack when not supplied.
            int[] linep = new int[1];
            filename = Context.getSourcePositionFromStack(linep);
            if (filename != null) {
                lineNumber = linep[0];
            } else {
                filename = "";
            }
        }
        String sourceName = ScriptRuntime.makeUrlForGeneratedScript(true, filename, lineNumber);
        ErrorReporter reporter;
        reporter = DefaultErrorReporter.forEval(cx.getErrorReporter());
        Evaluator evaluator = Context.createInterpreter();
        if (evaluator == null) {
            throw new JavaScriptException("Interpreter not present", filename, lineNumber);
        }
        // Compile with explicit interpreter instance to force interpreter mode.
        Script script = cx.compileString(x.toString(), evaluator, reporter, sourceName, 1, null);
        evaluator.setEvalScriptFlag(script);
        Callable c = (Callable) script;
        return c.call(cx, scope, (Scriptable) thisArg, ScriptRuntime.emptyArgs);
    }
}
|
public class CmsImportResultList { /** * Ensures the existence of the ' empty ' label . < p > */
protected void ensureEmptyLabel ( ) { } }
|
if ( m_emptyLabel == null ) { m_emptyLabel = new Label ( CmsAliasMessages . messagesEmptyImportResult ( ) ) ; } m_root . add ( m_emptyLabel ) ;
|
import java.util.*;

class GatherElements {

    /**
     * Groups the first elements of int pairs by their second element.
     *
     * <p>Examples:
     * <pre>
     * gatherElements(new int[][]{{6,5},{2,7},{2,5},{8,7},{9,8},{3,7}})
     *   -> {5:[6,2], 7:[2,8,3], 8:[9]}
     * gatherElements(new int[][]{{7,6},{3,8},{3,6},{9,8},{10,9},{4,8}})
     *   -> {6:[7,3], 8:[3,9,4], 9:[10]}
     * </pre>
     *
     * @param tupleList an array of int pairs; element [1] is the grouping key,
     *                  element [0] the grouped value. The input is not modified.
     * @return a map from each distinct second element to the list of first
     *         elements that share it, in their original input order.
     */
    public static Map<Integer, List<Integer>> gatherElements(int[][] tupleList) {
        Map<Integer, List<Integer>> result = new HashMap<>();
        // Iterate in input order: within each key the values keep their original
        // relative order, matching the documented examples. (A previous version
        // pre-sorted by key, but a stable sort by the grouping key never reorders
        // entries sharing a key, so the sort was dead work and also mutated the
        // caller's array; it is dropped.)
        for (int[] tuple : tupleList) {
            result.computeIfAbsent(tuple[1], k -> new ArrayList<>()).add(tuple[0]);
        }
        return result;
    }
}
|
public class SupplementaryMaterial { /** * Gets the value of the caption property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the caption property .
* For example , to add a new item , do as follows :
* < pre >
* getCaption ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link Caption } */
public java . util . List < Caption > getCaption ( ) { } }
|
if ( caption == null ) { caption = new ArrayList < Caption > ( ) ; } return this . caption ;
|
public class SerializationUtils { /** * Deserialize a String obtained via { @ link # serialize ( Serializable ) } into an object , using the
* given { @ link BaseEncoding } , which must be the same { @ link BaseEncoding } used to serialize the object .
* @ param serialized The serialized String
* @ param clazz The class the deserialized object should be cast to .
* @ param enc The { @ link BaseEncoding } used to decode the String .
* @ return The deserialized object
* @ throws IOException if it fails to deserialize the object */
public static < T extends Serializable > T deserialize ( String serialized , Class < T > clazz , BaseEncoding enc ) throws IOException { } }
|
return deserializeFromBytes ( enc . decode ( serialized ) , clazz ) ;
|
public class Stage { /** * Answers a { @ code Protocols } that provides one or more supported { @ code protocols } for the
* newly created { @ code Actor } according to { @ code definition } .
* @ param protocols the { @ code Class < ? > } [ ] array of protocols that the { @ code Actor } supports
* @ param definition the { @ code Definition } providing parameters to the { @ code Actor }
* @ param parent the Actor that is this actor ' s parent
* @ param maybeSupervisor the possible Supervisor of this actor
* @ param logger the Logger of this actor
* @ return Protocols */
public Protocols actorFor ( final Class < ? > [ ] protocols , final Definition definition , final Actor parent , final Supervisor maybeSupervisor , final Logger logger ) { } }
|
final ActorProtocolActor < Object > [ ] all = actorProtocolFor ( protocols , definition , parent , maybeSupervisor , logger ) ; return new Protocols ( ActorProtocolActor . toActors ( all ) ) ;
|
public class Revision { /** * The user - defined properties , without the ones reserved by CouchDB .
* This is based on - properties , with every key whose name starts with " _ " removed .
* @ return user - defined properties , without the ones reserved by CouchDB . */
@ InterfaceAudience . Public public Map < String , Object > getUserProperties ( ) { } }
|
Map < String , Object > result = new HashMap < String , Object > ( ) ; Map < String , Object > sourceMap = getProperties ( ) ; for ( String key : sourceMap . keySet ( ) ) { if ( ! key . startsWith ( "_" ) ) { result . put ( key , sourceMap . get ( key ) ) ; } } return result ;
|
public class GetWSDL {

    /**
     * Run method: downloads the WSDL for the configured site, stores the
     * processed XML on the main record, and optionally notifies the original
     * sender through the message queue.
     */
    public void run() {
        Record record = this.getMainRecord();
        try {
            Writer out = new StringWriter();
            MessageDetailTarget messageDetailTarget = (MessageDetailTarget) this.getMainRecord();
            String strSite = messageDetailTarget.getProperty(TrxMessageHeader.DESTINATION_PARAM);
            String strWSDLPath = messageDetailTarget.getProperty(TrxMessageHeader.WSDL_PATH);
            strWSDLPath = this.getFullPath(strSite, strWSDLPath);
            // x strWSDLPath = "http://www.tourloco.com:8181/tour/apphtml?datatype=wsdl&version=b2007";
            // Download the WSDL document into the in-memory writer.
            Utility.transferURLStream(strWSDLPath, null, null, out);
            out.flush();
            out.close();
            record.edit();
            this.processWSDLXML(out.toString());
            record.set();
            if (this.getProperty(TrxMessageHeader.REGISTRY_ID) != null) // The return Queue ID
            {
                Application app = (Application) this.getTask().getApplication();
                Integer intFilterID = new Integer(this.getProperty(TrxMessageHeader.REGISTRY_ID));
                TrxMessageHeader messageHeader = new TrxMessageHeader(MessageConstants.TRX_RECEIVE_QUEUE, MessageConstants.INTERNET_QUEUE, null);
                messageHeader.setRegistryIDMatch(intFilterID);
                Map<String, Object> map = new Hashtable<String, Object>();
                map.put(DBConstants.OBJECT_ID, this.getProperty(DBConstants.OBJECT_ID));
                BaseMessage message = new MapMessage(messageHeader, map);
                // Tell the sender that I've finished (not required)
                app.getMessageManager().sendMessage(message);
            }
        } catch (IOException e) {
            // NOTE(review): failures are only printed, not reported to the
            // caller — confirm this best-effort behavior is intended.
            e.printStackTrace();
        } catch (DBException e) {
            e.printStackTrace();
            return;
        }
    }
}
|
public class ColorPanelView {

    /**
     * Show a toast message with the hex color code below the view.
     */
    public void showHint() {
        final int[] screenPos = new int[2];
        final Rect displayFrame = new Rect();
        getLocationOnScreen(screenPos);
        getWindowVisibleDisplayFrame(displayFrame);
        final Context context = getContext();
        final int width = getWidth();
        final int height = getHeight();
        final int midy = screenPos[1] + height / 2;
        int referenceX = screenPos[0] + width / 2;
        // NOTE(review): the horizontal offset is measured from the END edge
        // (see Gravity.END below), hence the mirroring in LTR layouts — confirm
        // against the platform cheat-sheet behavior.
        if (ViewCompat.getLayoutDirection(this) == ViewCompat.LAYOUT_DIRECTION_LTR) {
            final int screenWidth = context.getResources().getDisplayMetrics().widthPixels;
            referenceX = screenWidth - referenceX; // mirror
        }
        // Build the hex string: full ARGB hex when the color has translucency,
        // otherwise the 6-digit RGB form.
        StringBuilder hint = new StringBuilder("#");
        if (Color.alpha(color) != 255) {
            hint.append(Integer.toHexString(color).toUpperCase(Locale.ENGLISH));
        } else {
            hint.append(String.format("%06X", 0xFFFFFF & color).toUpperCase(Locale.ENGLISH));
        }
        Toast cheatSheet = Toast.makeText(context, hint.toString(), Toast.LENGTH_SHORT);
        if (midy < displayFrame.height()) {
            // Show along the top; follow action buttons
            cheatSheet.setGravity(Gravity.TOP | GravityCompat.END, referenceX, screenPos[1] + height - displayFrame.top);
        } else {
            // Show along the bottom center
            cheatSheet.setGravity(Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL, 0, height);
        }
        cheatSheet.show();
    }
}
|
public class ViewFactory {

    /**
     * Create the field view for a model class.
     * Walks up the class hierarchy until a view class can be resolved and
     * instantiated for the model class (or one of its superclasses).
     *
     * @param screenFieldClass the screen field (model) class to create a view for.
     * @return the new view for this model, or null if no view class could be resolved.
     */
    public ScreenFieldView getViewClassForModel(Class<?> screenFieldClass) {
        while (screenFieldClass != null) {
            String strModelClassName = screenFieldClass.getName();
            String strViewClassName = null;
            if (ENABLE_CACHE)
                strViewClassName = m_classCache.getProperty(strModelClassName); // Name in cache?
            if (strViewClassName == null)
                strViewClassName = this.getViewClassNameFromModelClassName(strModelClassName);
            if (strViewClassName != null) {
                // Great, found the class name. Try to instantiate the class.
                ScreenFieldView view = (ScreenFieldView) ClassServiceUtility.getClassService().makeObjectFromClassName(strViewClassName); // Ignore class not found
                if (view != null) {
                    if (ENABLE_CACHE)
                        m_classCache.setProperty(strModelClassName, strViewClassName); // Success - cache the name for later
                    return view;
                }
            }
            // No view at this level: retry with the model's superclass.
            screenFieldClass = screenFieldClass.getSuperclass();
        }
        return null;
    }
}
|
public class ElementFilter {

    /**
     * Returns a list of fields in {@code elements}.
     *
     * @param elements the elements to filter
     * @return a list of fields in {@code elements}
     */
    public static List<VariableElement> fieldsIn(Iterable<? extends Element> elements) {
        // Delegate to the generic kind-based filter: FIELD_KINDS selects the
        // field-like element kinds; matches are returned as VariableElement.
        return listFilter(elements, FIELD_KINDS, VariableElement.class);
    }
}
|
public class VectorMath { /** * Sums the values in the vector , returning the result . */
public static double sum ( DoubleVector v ) { } }
|
double sum = 0 ; if ( v instanceof SparseVector ) { for ( int nz : ( ( SparseVector ) v ) . getNonZeroIndices ( ) ) sum += v . get ( nz ) ; } else { int len = v . length ( ) ; for ( int i = 0 ; i < len ; ++ i ) sum += v . get ( i ) ; } return sum ;
|
public class Configurer {

    /**
     * Get the class implementation from its name. Default constructor must be
     * available.
     *
     * @param <T> The instance type.
     * @param type The class type.
     * @param path The node path.
     * @return The typed class instance.
     * @throws LionEngineException If invalid class.
     */
    public final <T> T getImplementation(Class<T> type, String... path) {
        // Delegate using this configurer's own class loader.
        return getImplementation(getClass().getClassLoader(), type, path);
    }
}
|
public class SQLSupport { /** * Create a SQL ORDER BY clause from the list of { @ link Sort } objects . This fragment begins with
* ORDER BY . If the given list of sorts contains a sort with sort expression " foo " and sort direction
* { @ link SortDirection # DESCENDING } , the generated SQL statement will appear as :
* < pre >
* ORDER BY foo DESC
* < / pre >
* @ param sorts the list of { @ link Sort } objects
* @ return the generated SQL ORDER BY clause or an emtpy string if there are no sorts */
public final String createOrderByClause ( List /* < Sort > */
sorts ) { } }
|
if ( sorts == null || sorts . size ( ) == 0 ) return EMPTY ; InternalStringBuilder sql = new InternalStringBuilder ( 64 ) ; sql . append ( "ORDER BY " ) ; internalCreateOrderByFragment ( sql , sorts ) ; return sql . toString ( ) ;
|
public class StatementServiceImp {

    /**
     * Returns the files attached to a statement.
     *
     * @see com.popbill.api.StatementService#getFiles(java.lang.String, java.lang.Integer, java.lang.String)
     */
    @Override
    public AttachedFile[] getFiles(String CorpNum, int ItemCode, String MgtKey) throws PopbillException {
        // Error message (Korean): "The management key was not entered."
        if (MgtKey == null || MgtKey.isEmpty())
            throw new PopbillException(-99999999, "관리번호가 입력되지 않았습니다.");
        return httpget("/Statement/" + ItemCode + "/" + MgtKey + "/Files", CorpNum, null, AttachedFile[].class);
    }
}
|
public class MusicService { /** * Sets the current sound state according to the value stored in preferences . Mostly for internal use .
* @ param preferences path to the preferences . Will be set as global music preferences path .
* @ param preferenceName name of the state preference .
* @ param defaultValue used if preference is not set . */
public void setSoundEnabledFromPreferences ( final String preferences , final String preferenceName , final boolean defaultValue ) { } }
|
musicPreferences = preferences ; soundEnabledPreferenceName = preferenceName ; setSoundEnabled ( readFromPreferences ( preferences , preferenceName , defaultValue ) ) ;
|
public class GraphQLArgument { /** * This helps you transform the current GraphQLArgument into another one by starting a builder with all
* the current values and allows you to transform it how you want .
* @ param builderConsumer the consumer code that will be given a builder to transform
* @ return a new field based on calling build on that builder */
public GraphQLArgument transform ( Consumer < Builder > builderConsumer ) { } }
|
Builder builder = newArgument ( this ) ; builderConsumer . accept ( builder ) ; return builder . build ( ) ;
|
public class CassandraSchemaManager {

    /**
     * Initiates the Thrift client, trying each configured host in turn until a
     * connection (and optional login) succeeds.
     *
     * @return boolean value, i.e. client started or not (true on success).
     * @throws IllegalArgumentException if a host is null or the port is not numeric.
     * @throws SchemaGenerationException if every host fails, or any non-transport error occurs.
     */
    protected boolean initiateClient() {
        Throwable message = null;
        for (String host : hosts) {
            if (host == null || !StringUtils.isNumeric(port) || port.isEmpty()) {
                log.error("Host or port should not be null, Port should be numeric.");
                throw new IllegalArgumentException("Host or port should not be null, Port should be numeric.");
            }
            // Prefer an explicitly configured Thrift port over the default one.
            int thriftPort = externalProperties.get(CassandraConstants.THRIFT_PORT) != null ? Integer.parseInt((String) externalProperties.get(CassandraConstants.THRIFT_PORT)) : Integer.parseInt(port);
            TSocket socket = new TSocket(host, thriftPort);
            TTransport transport = new TFramedTransport(socket);
            TProtocol protocol = new TBinaryProtocol(transport, true, true);
            cassandra_client = new Cassandra.Client(protocol);
            try {
                if (!socket.isOpen()) {
                    socket.open();
                    if (userName != null) {
                        // Authenticate when credentials are configured.
                        Map<String, String> credentials = new HashMap<String, String>();
                        credentials.put("username", userName);
                        credentials.put("password", password);
                        AuthenticationRequest auth_request = new AuthenticationRequest(credentials);
                        cassandra_client.login(auth_request);
                    }
                }
                return true;
            } catch (TTransportException e) {
                // Transport failure: remember it and fall through to the next host.
                message = e;
                log.warn("Error while opening socket for host {}, skipping for next available node ", host);
            } catch (Exception e) {
                log.error("Error during creating schema in cassandra, Caused by: .", e);
                throw new SchemaGenerationException(e, "Cassandra");
            }
        }
        // All hosts failed: surface the last transport error.
        throw new SchemaGenerationException("Error while opening socket, Caused by: .", message, "Cassandra");
    }
}
|
public class XmlMultiConfiguration { /** * Create a builder seeded from an existing { @ code XmlMultiConfiguration } .
* @ param config existing configuration seed
* @ return a builder seeded with the xml configuration */
public static Builder from ( XmlMultiConfiguration config ) { } }
|
return new Builder ( ) { @ Override public Builder withManager ( String identity , Configuration configuration ) { Map < String , Config > configurations = new HashMap < > ( config . configurations ) ; configurations . put ( identity , new SingleConfig ( configuration ) ) ; return from ( new XmlMultiConfiguration ( configurations ) ) ; } @ Override public Builder withoutManager ( String identity ) { Map < String , Config > configurations = config . configurations ; configurations . remove ( identity ) ; return from ( new XmlMultiConfiguration ( configurations ) ) ; } @ Override public Variant withManager ( String identity ) { Map < String , Configuration > variants = new HashMap < > ( ) ; Config current = config . configurations . get ( identity ) ; if ( current instanceof VariantConfig ) { variants . putAll ( ( ( VariantConfig ) current ) . configs ) ; } else if ( current != null ) { throw new IllegalStateException ( "Existing non-variant configuration cannot be replaced - it must be removed first." ) ; } return new Variant ( ) { @ Override public Variant withoutVariant ( String variant ) { variants . remove ( variant ) ; return this ; } @ Override public Variant variant ( String variant , Configuration configuration ) { variants . put ( variant , configuration ) ; return this ; } @ Override public Builder withoutManager ( String identity ) { return from ( build ( ) ) . withoutManager ( identity ) ; } @ Override public Builder withManager ( String identity , Configuration configuration ) { return from ( build ( ) ) . withManager ( identity , configuration ) ; } @ Override public Variant withManager ( String identity ) { return from ( build ( ) ) . withManager ( identity ) ; } @ Override public XmlMultiConfiguration build ( ) { Map < String , Config > configurations = new HashMap < > ( config . configurations ) ; configurations . 
put ( identity , new VariantConfig ( variants ) ) ; return new XmlMultiConfiguration ( configurations ) ; } } ; } @ Override public XmlMultiConfiguration build ( ) { return config ; } } ;
|
public class ValidateEnv { /** * Parses the command line arguments and options in { @ code args } .
* After successful execution of this method , command line arguments can be
* retrieved by invoking { @ link CommandLine # getArgs ( ) } , and options can be
* retrieved by calling { @ link CommandLine # getOptions ( ) } .
* @ param args command line arguments to parse
* @ return { @ link CommandLine } object representing the parsing result
* @ throws InvalidArgumentException if command line contains invalid argument ( s ) */
private static CommandLine parseArgsAndOptions ( Options options , String ... args ) throws InvalidArgumentException { } }
|
CommandLineParser parser = new DefaultParser ( ) ; CommandLine cmd ; try { cmd = parser . parse ( options , args ) ; } catch ( ParseException e ) { throw new InvalidArgumentException ( "Failed to parse args for validateEnv" , e ) ; } return cmd ;
|
public class ArrayFile { /** * Sets the water marks of this ArrayFile .
* @ param lwmScn - the low water mark
* @ param hwmScn - the high water mark
* @ throws IOException if the < code > lwmScn < / code > is greater than the < code > hwmScn < / code >
* or the changes to the underlying file cannot be flushed . */
public void setWaterMarks ( long lwmScn , long hwmScn ) throws IOException { } }
|
if ( lwmScn <= hwmScn ) { writeHwmScn ( hwmScn ) ; _writer . flush ( ) ; writeLwmScn ( lwmScn ) ; _writer . flush ( ) ; } else { throw new IOException ( "Invalid water marks: lwmScn=" + lwmScn + " hwmScn=" + hwmScn ) ; }
|
public class AdvancedResizeOp {

    /**
     * {@inheritDoc}
     *
     * <p>Creates a destination image with the same dimensions as {@code src},
     * using {@code destCM} when given or the source's color model otherwise.
     */
    public final BufferedImage createCompatibleDestImage(BufferedImage src, ColorModel destCM) {
        // Fall back to the source's color model when none was supplied.
        final ColorModel model = (destCM != null) ? destCM : src.getColorModel();
        final WritableRaster raster = model.createCompatibleWritableRaster(src.getWidth(), src.getHeight());
        return new BufferedImage(model, raster, model.isAlphaPremultiplied(), null);
    }
}
|
public class AvailableCapacity {

    /**
     * The total number of instances supported by the Dedicated Host.
     *
     * @return The total number of instances supported by the Dedicated Host.
     */
    public java.util.List<InstanceCapacity> getAvailableInstanceCapacity() {
        // Lazily initialize so this getter never returns null (AWS SDK list convention).
        if (availableInstanceCapacity == null) {
            availableInstanceCapacity = new com.amazonaws.internal.SdkInternalList<InstanceCapacity>();
        }
        return availableInstanceCapacity;
    }
}
|
public class CompositeBinding { /** * Removes a { @ link Binding } from this { @ code CompositeBinding } , and disposes the
* { @ link Binding } .
* @ param b the { @ link Binding } to remove */
public void remove ( final Binding b ) { } }
|
if ( ! disposedInd ) { boolean unsubscribe = false ; if ( bindings == null ) { return ; } unsubscribe = bindings . remove ( b ) ; if ( unsubscribe ) { // if we removed successfully we then need to call dispose on it
b . dispose ( ) ; } }
|
public class GPARCImpl {

    /**
     * Sets the START attribute and emits a SET notification to registered
     * adapters when required (EMF generated setter).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setSTART(Integer newSTART) {
        Integer oldSTART = start;
        start = newSTART;
        // Notify observers only when someone is listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.GPARC__START, oldSTART, start));
    }
}
|
public class Resolver {

    /**
     * syck_resolver_node_import
     *
     * Converts a parsed YAML node into the corresponding Ruby object
     * (String / Array / Hash), honoring YAML merge keys and default keys, and
     * finally dispatching to {@code transfer} when the node carries a type id.
     */
    @JRubyMethod
    public static IRubyObject node_import(IRubyObject self, IRubyObject node) {
        // System.err.println("syck_resolver_node_import()");
        final Ruby runtime = self.getRuntime();
        final ThreadContext ctx = runtime.getCurrentContext();
        org.yecht.Node n = (org.yecht.Node) node.dataGetStructChecked();
        YAMLExtra x = ((Node) node).x;
        IRubyObject obj = null;
        switch (n.kind) {
        case Str:
            // Scalar: share the underlying byte buffer with the new RubyString.
            Data.Str dd = (Data.Str) n.data;
            obj = RubyString.newStringShared(runtime, dd.ptr.buffer, dd.ptr.start, dd.len);
            break;
        case Seq:
            // Sequence: copy each child into a pre-sized RubyArray.
            Data.Seq ds = (Data.Seq) n.data;
            obj = RubyArray.newArray(runtime, ds.idx);
            for (int i = 0; i < ds.idx; i++) {
                IRubyObject obj2 = (IRubyObject) n.seqRead(i);
                ((RubyArray) obj).store(i, obj2);
            }
            break;
        case Map:
            Data.Map dm = (Data.Map) n.data;
            obj = RubyHash.newHash(runtime);
            RubyClass cMergeKey = x.MergeKey;
            RubyClass cDefaultKey = x.DefaultKey;
            RubyClass cHash = runtime.getHash();
            RubyClass cArray = runtime.getArray();
            for (int i = 0; i < dm.idx; i++) {
                IRubyObject k = (IRubyObject) n.mapRead(MapPart.Key, i);
                IRubyObject v = (IRubyObject) n.mapRead(MapPart.Value, i);
                if (null == v) {
                    v = runtime.getNil();
                }
                boolean skip_aset = false;
                if (cMergeKey.isInstance(k)) {
                    // YAML merge key: fold the merged hash (or array of hashes)
                    // into the result instead of storing the key itself.
                    if (cHash.isInstance(v)) {
                        IRubyObject dup = v.callMethod(ctx, "dup");
                        dup.callMethod(ctx, "update", obj);
                        obj = dup;
                        skip_aset = true;
                    } else if (cArray.isInstance(v)) {
                        IRubyObject end = ((RubyArray) v).pop(ctx);
                        if (cHash.isInstance(end)) {
                            final IRubyObject dup = end.callMethod(ctx, "dup");
                            v = ((RubyArray) v).reverse();
                            ((RubyArray) v).append(obj);
                            RubyEnumerable.callEach(runtime, ctx, v, new BlockCallback() {
                                // syck_merge_i
                                public IRubyObject call(ThreadContext _ctx, IRubyObject[] largs, Block blk) {
                                    IRubyObject entry = largs[0];
                                    IRubyObject tmp = null;
                                    if (!(tmp = TypeConverter.convertToTypeWithCheck(entry, runtime.getHash(), "to_hash")).isNil()) {
                                        dup.callMethod(_ctx, "update", tmp);
                                    }
                                    return runtime.getNil();
                                }
                            });
                            obj = dup;
                            skip_aset = true;
                        }
                    }
                } else if (cDefaultKey.isInstance(k)) {
                    // Default key: becomes the hash's default value.
                    obj.callMethod(ctx, "default=", v);
                    skip_aset = true;
                }
                if (!skip_aset) {
                    ((RubyHash) obj).fastASet(k, v);
                }
            }
            break;
        }
        if (n.type_id != null) {
            // Tagged node: let the resolver's transfer method finish conversion.
            obj = self.callMethod(ctx, "transfer", new IRubyObject[] { runtime.newString(n.type_id), obj });
        }
        return obj;
    }
}
|
class Main {
    /**
     * Computes the length of a circular arc from its diameter and central angle.
     *
     * <p>Deliberately uses the 22/7 approximation of pi so the documented
     * example values hold exactly.
     *
     * Examples:
     *   calculateArcLength(9, 45)  -> 3.5357142857142856
     *   calculateArcLength(9, 480) -> null
     *   calculateArcLength(5, 270) -> 11.785714285714285
     *
     * @param diameter the diameter of the circle
     * @param angle    the measure of the angle in degrees
     * @return the arc length, or {@code null} when the angle is 360 degrees or more
     */
    public static Double calculateArcLength(float diameter, float angle) {
        if (angle >= 360) {
            // A full turn (or more) is rejected per the documented contract.
            return null;
        }
        final double approxPi = 22.0 / 7.0;
        final double circumference = approxPi * diameter;
        return circumference * (angle / 360.0);
    }

    public static void main(String[] args) {
        System.out.println("Arc Length: " + calculateArcLength(9, 45));
    }
}
|
public class ClustersInner { /** * Lists the HDInsight clusters in a resource group .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; ClusterInner & gt ; object */
public Observable < Page < ClusterInner > > listByResourceGroupNextAsync ( final String nextPageLink ) { } }
|
return listByResourceGroupNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < ClusterInner > > , Page < ClusterInner > > ( ) { @ Override public Page < ClusterInner > call ( ServiceResponse < Page < ClusterInner > > response ) { return response . body ( ) ; } } ) ;
|
public class SyntheticStorableReferenceBuilder {
    /**
     * Returns true if the properties of the given index entry match those
     * contained in the master, excluding any version property. This will
     * always return true after a call to copyFromMaster.
     *
     * @param indexEntry index entry whose properties will be tested
     * @param master source of property values
     * @throws FetchException if comparing the properties requires a fetch that fails
     * @deprecated call getReferenceAccess
     */
    @Deprecated
    public boolean isConsistent(Storable indexEntry, S master) throws FetchException {
        // Thin delegate kept for backward compatibility; the actual comparison
        // lives on the reference-access object.
        return getReferenceAccess().isConsistent(indexEntry, master);
    }
}
|
public class UserProfileDto { /** * transformers / / / / / */
public static UserProfileDto fromUser ( User user ) { } }
|
UserProfileDto result = new UserProfileDto ( ) ; result . id = user . getId ( ) ; result . firstName = user . getFirstName ( ) ; result . lastName = user . getLastName ( ) ; result . email = user . getEmail ( ) ; return result ;
|
public class MemoryInfo { /** * Returns an estimation , in bytes , of the memory usage of the given objects plus ( recursively )
* objects referenced via non - static references from any of those objects . Which references are
* traversed depends on the VisibilityFilter passed in . If two or more of the given objects
* reference the same Object X , then the memory used by Object X will only be counted once . However ,
* the method guarantees that the memory for a given object ( either in the passed - in collection or
* found while traversing the object graphs from those objects ) will not be counted more than once .
* The estimate for each individual object is provided by the running JVM and is likely to be as
* accurate a measure as can be reasonably made by the running Java program . It will generally
* include memory taken up for " housekeeping " of that object .
* @ param objs The collection of objects whose memory usage is to be totalled .
* @ param referenceFilter Specifies which references are to be recursively included in the resulting
* count ( ALL , PRIVATE _ ONLY , NON _ PUBLIC , NONE ) .
* @ return An estimate , in bytes , of the total heap memory taken up by the obejcts in objs and ,
* recursively , objects referenced by any of those objects that match the VisibilityFilter
* criterion .
* @ throws IOException */
public static long deepMemoryUsageOfAll ( Instrumentation inst , final Collection < ? extends java . lang . Object > objs , final int referenceFilter ) throws IOException { } }
|
long total = 0L ; final Set < Integer > counted = new HashSet < Integer > ( objs . size ( ) * 4 ) ; for ( final Object o : objs ) { total += deepMemoryUsageOf0 ( inst , counted , o , referenceFilter ) ; } return total ;
|
public class NetworkInterfaceTapConfigurationsInner {
    /**
     * Creates or updates a Tap configuration in the specified NetworkInterface.
     *
     * @param resourceGroupName the name of the resource group
     * @param networkInterfaceName the name of the network interface
     * @param tapConfigurationName the name of the tap configuration
     * @param tapConfigurationParameters parameters supplied to the create or update tap configuration operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the NetworkInterfaceTapConfigurationInner object if successful
     */
    public NetworkInterfaceTapConfigurationInner createOrUpdate(String resourceGroupName, String networkInterfaceName, String tapConfigurationName, NetworkInterfaceTapConfigurationInner tapConfigurationParameters) {
        // Blocks on the async long-running operation and takes its final
        // emission ("last"), i.e. waits until provisioning has completed.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, networkInterfaceName, tapConfigurationName, tapConfigurationParameters).toBlocking().last().body();
    }
}
|
public class MediaMarkupBuilderUtil { /** * Get dimension from first media format defined in media args . Fall back to dummy min . dimension if none specified .
* @ param media Media metadata
* @ return Dimension */
public static @ NotNull Dimension getMediaformatDimension ( @ NotNull Media media ) { } }
|
// Create dummy image element to be displayed in Edit mode as placeholder .
MediaArgs mediaArgs = media . getMediaRequest ( ) . getMediaArgs ( ) ; MediaFormat [ ] mediaFormats = mediaArgs . getMediaFormats ( ) ; // detect width / height - either from media args , or from first media format
long width = mediaArgs . getFixedWidth ( ) ; long height = mediaArgs . getFixedHeight ( ) ; if ( ( width == 0 || height == 0 ) && mediaFormats != null && mediaFormats . length > 0 ) { MediaFormat firstMediaFormat = mediaArgs . getMediaFormats ( ) [ 0 ] ; Dimension dimension = firstMediaFormat . getMinDimension ( ) ; if ( dimension != null ) { width = dimension . getWidth ( ) ; height = dimension . getHeight ( ) ; } } // fallback to min width / height
if ( width == 0 ) { width = MediaMarkupBuilder . DUMMY_MIN_DIMENSION ; } if ( height == 0 ) { height = MediaMarkupBuilder . DUMMY_MIN_DIMENSION ; } return new Dimension ( width , height ) ;
|
public class ReflectUtils {
    /**
     * Rethrows the root cause of a reflective invocation failure.
     *
     * <p>Note: despite the declared return type this method never returns
     * normally - it always throws. If the target exception is a
     * {@link RuntimeException} it is rethrown as-is; otherwise it is wrapped
     * in an {@link IllegalArgumentException}. The return type only exists so
     * callers can write {@code throw getCause(e);}.
     *
     * @param e the InvocationTargetException whose cause is extracted
     * @return never returns normally
     */
    private static RuntimeException getCause(InvocationTargetException e) {
        final Throwable target = e.getCause();
        if (target instanceof RuntimeException) {
            throw (RuntimeException) target;
        }
        throw new IllegalArgumentException(target);
    }
}
|
public class AppsImpl {
    /**
     * Gets all the available custom prebuilt domains for a specific culture.
     *
     * @param culture the culture
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;PrebuiltDomain&gt; object
     */
    public Observable<List<PrebuiltDomain>> listAvailableCustomPrebuiltDomainsForCultureAsync(String culture) {
        // Strip the ServiceResponse wrapper so subscribers receive the plain list.
        return listAvailableCustomPrebuiltDomainsForCultureWithServiceResponseAsync(culture).map(new Func1<ServiceResponse<List<PrebuiltDomain>>, List<PrebuiltDomain>>() {
            @Override
            public List<PrebuiltDomain> call(ServiceResponse<List<PrebuiltDomain>> response) {
                return response.body();
            }
        });
    }
}
|
public class MoskitoHttpServlet { /** * Creates the stats objects . Registers the servlet at the ProducerRegistry . */
@ Override public void init ( ServletConfig config ) throws ServletException { } }
|
super . init ( config ) ; getStats = new ServletStats ( "get" , getMonitoringIntervals ( ) ) ; postStats = new ServletStats ( "post" , getMonitoringIntervals ( ) ) ; putStats = new ServletStats ( "put" , getMonitoringIntervals ( ) ) ; headStats = new ServletStats ( "head" , getMonitoringIntervals ( ) ) ; optionsStats = new ServletStats ( "options" , getMonitoringIntervals ( ) ) ; traceStats = new ServletStats ( "trace" , getMonitoringIntervals ( ) ) ; deleteStats = new ServletStats ( "delete" , getMonitoringIntervals ( ) ) ; lastModifiedStats = new ServletStats ( "lastModified" , getMonitoringIntervals ( ) ) ; cachedStatList = new ArrayList < IStats > ( useShortStatList ( ) ? 2 : 8 ) ; cachedStatList . add ( getStats ) ; cachedStatList . add ( postStats ) ; if ( ! useShortStatList ( ) ) { cachedStatList . add ( deleteStats ) ; cachedStatList . add ( headStats ) ; cachedStatList . add ( optionsStats ) ; cachedStatList . add ( putStats ) ; cachedStatList . add ( traceStats ) ; cachedStatList . add ( lastModifiedStats ) ; } ProducerRegistryFactory . getProducerRegistryInstance ( ) . registerProducer ( this ) ;
|
public class ProxyBinding {
    /**
     * As {@link #proxy(Class, java.lang.reflect.InvocationHandler)} but with a
     * predefined no-op {@link java.lang.reflect.InvocationHandler}.
     *
     * @param anyInterface the interface type to create a no-op proxy for
     * @return a proxy instance of the given interface backed by the no-op handler
     */
    public static <P> P proxy(Class<P> anyInterface) {
        // Wrap the raw Class in the project's Interface descriptor and delegate
        // to the main proxy factory.
        return proxy(Interface.type(anyInterface));
    }
}
|
public class CorneredEditText { /** * 根据指定的正则验证用户输入的值 */
public String getValue ( ) { } }
|
String value = getText ( ) . toString ( ) ; return isEmpty ( value_verify_regex ) ? value : ( verifyValue ( ) ? value : "" ) ;
|
public class Field {
    /**
     * Write the field to the specified channel.
     *
     * @param channel the channel to which it should be written
     * @throws IOException if there is a problem writing to the channel
     */
    public void write(WritableByteChannel channel) throws IOException {
        logger.debug("..writing> {}", this);
        // Util.writeFully presumably handles short writes so the whole encoded
        // field reaches the channel - confirm in Util.
        Util.writeFully(getBytes(), channel);
    }
}
|
public class ServiceActionDetailMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param serviceActionDetail the object to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller receiving the fields
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ServiceActionDetail serviceActionDetail, ProtocolMarshaller protocolMarshaller) {
        if (serviceActionDetail == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(serviceActionDetail.getServiceActionSummary(), SERVICEACTIONSUMMARY_BINDING);
            protocolMarshaller.marshall(serviceActionDetail.getDefinition(), DEFINITION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class LongRunningJobMonitor { /** * cleanup jobs that have finished executing after { @ link # thresholdDetectionInMillis } */
Long cleanUpLongJobIfItHasFinishedExecuting ( long currentTime , Job job ) { } }
|
if ( longRunningJobs . containsKey ( job ) && longRunningJobs . get ( job ) . executionsCount != job . executionsCount ( ) ) { Long jobLastExecutionTimeInMillis = job . lastExecutionEndedTimeInMillis ( ) ; int jobExecutionsCount = job . executionsCount ( ) ; LongRunningJobInfo jobRunningInfo = longRunningJobs . get ( job ) ; long jobExecutionDuration = 0L ; if ( jobExecutionsCount == jobRunningInfo . executionsCount + 1 ) { jobExecutionDuration = jobLastExecutionTimeInMillis - jobRunningInfo . jobStartedtimeInMillis ; logger . info ( "Job '{}' has finished executing after {}ms" , job . name ( ) , jobExecutionDuration ) ; } else { jobExecutionDuration = currentTime - jobRunningInfo . jobStartedtimeInMillis ; logger . info ( "Job '{}' has finished executing after about {}ms" , job . name ( ) , jobExecutionDuration ) ; } longRunningJobs . remove ( job ) ; return jobExecutionDuration ; } return null ;
|
public class Validate { /** * Checks if the given String is NOT a positive double value . < br >
* This method tries to parse a double value and then checks if it is smaller or equal to 0.
* @ param value The String value to validate .
* @ return The parsed double value
* @ throws ParameterException if the given String value cannot be parsed as double or its value is bigger than 0. */
public static Double notPositiveDouble ( String value ) { } }
|
Double doubleValue = Validate . isDouble ( value ) ; notPositive ( doubleValue ) ; return doubleValue ;
|
public class Client { /** * Get the { @ link ApplicationDefinition } for the given application name . If the
* connected Doradus server has no such application defined , null is returned .
* @ param appName Application name .
* @ return Application ' s { @ link ApplicationDefinition } , if it exists ,
* otherwise null . */
public ApplicationDefinition getAppDef ( String appName ) { } }
|
Utils . require ( ! m_restClient . isClosed ( ) , "Client has been closed" ) ; Utils . require ( appName != null && appName . length ( ) > 0 , "appName" ) ; try { // Send a GET request to " / _ applications / { application }
StringBuilder uri = new StringBuilder ( Utils . isEmpty ( m_apiPrefix ) ? "" : "/" + m_apiPrefix ) ; uri . append ( "/_applications/" ) ; uri . append ( Utils . urlEncode ( appName ) ) ; RESTResponse response = m_restClient . sendRequest ( HttpMethod . GET , uri . toString ( ) ) ; m_logger . debug ( "listApplication() response: {}" , response . toString ( ) ) ; if ( response . getCode ( ) == HttpCode . NOT_FOUND ) { return null ; } throwIfErrorResponse ( response ) ; ApplicationDefinition appDef = new ApplicationDefinition ( ) ; appDef . parse ( UNode . parse ( response . getBody ( ) , response . getContentType ( ) ) ) ; return appDef ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; }
|
public class Convert {
    /**
     * Converts the given value to a Float.
     *
     * <p>If the given value is null, or the conversion fails, the default value
     * is returned instead; conversion failures never throw.
     *
     * @param value the value to convert
     * @param defaultValue the default returned when the conversion fails
     * @return the converted Float, or {@code defaultValue} on failure
     */
    public static Float toFloat(Object value, Float defaultValue) {
        return convert(Float.class, value, defaultValue);
    }
}
|
public class Bytes {
    /**
     * Builds a long from the first 8 bytes of the array, big-endian:
     * b[0] becomes the most significant byte, b[7] the least.
     *
     * @param b the byte[] to convert; must contain at least 8 bytes
     * @return the assembled long value
     */
    public static long toLong(byte[] b) {
        long value = 0L;
        for (int i = 0; i < 8; i++) {
            // Shift what we have so far and fold in the next unsigned byte.
            value = (value << 8) | (((long) b[i]) & 0xFF);
        }
        return value;
    }
}
|
public class CharTrie { /** * Tokens set .
* @ return the set */
public Set < Character > tokens ( ) { } }
|
return root ( ) . getChildrenMap ( ) . keySet ( ) . stream ( ) . filter ( c -> c != END_OF_STRING && c != FALLBACK && c != ESCAPE ) . collect ( Collectors . toSet ( ) ) ;
|
public class File {
    /**
     * Write strings and a newline.
     * Implements a JavaScript function.
     *
     * @param cx the current JavaScript context
     * @param thisObj the File object the function is invoked on
     * @param args the values to write
     * @param funObj the function object
     * @exception IOException if an error occurred while accessing the file
     *            associated with this object
     */
    @JSFunction
    public static void writeLine(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws IOException {
        // Delegates to the shared writer; the 'true' flag is what distinguishes
        // writeLine from write - presumably it appends the trailing newline.
        write0(thisObj, args, true);
    }
}
|
public class Iterate {
    /**
     * Iterate over the specified collection applying the specified Function to
     * each element to calculate a key and return the results as a Map.
     *
     * @param iterable the source elements (they become the map values)
     * @param keyFunction computes the key for each element
     * @return a MutableMap keyed by the computed keys; on key collisions
     *         presumably the later element wins - confirm MapCollectProcedure
     */
    public static <T, K> MutableMap<K, T> toMap(Iterable<T> iterable, Function<? super T, ? extends K> keyFunction) {
        MutableMap<K, T> map = UnifiedMap.newMap();
        // MapCollectProcedure stores keyFunction(element) -> element for each element.
        Iterate.forEach(iterable, new MapCollectProcedure<T, K, T>(map, keyFunction));
        return map;
    }
}
|
public class VoiceApi {
    /**
     * Perform a single-step conference to the specified destination. This adds
     * the destination to the existing call, creating a conference if necessary.
     *
     * @param connId the connection ID of the call to conference
     * @param destination the number to add to the call
     * @param location name of the remote location in the form of &lt;SwitchName&gt; or &lt;T-ServerApplicationName&gt;@&lt;SwitchName&gt;. Used by Workspace to set the location attribute for the corresponding T-Server requests. (optional)
     * @param userData key/value data to include with the call (optional)
     * @param reasons information on causes for, and results of, actions taken by the user of the current DN (optional)
     * @param extensions media device/hardware reason codes and similar information (optional)
     * @throws WorkspaceApiException if the request fails or the server reports a non-OK status
     */
    public void singleStepConference(String connId, String destination, String location, KeyValueCollection userData, KeyValueCollection reasons, KeyValueCollection extensions) throws WorkspaceApiException {
        try {
            // Build the request payload; optional collections are converted to
            // the API's key/value list representation.
            VoicecallsidsinglestepconferenceData confData = new VoicecallsidsinglestepconferenceData();
            confData.setDestination(destination);
            confData.setLocation(location);
            confData.setUserData(Util.toKVList(userData));
            confData.setReasons(Util.toKVList(reasons));
            confData.setExtensions(Util.toKVList(extensions));
            SingleStepConferenceData data = new SingleStepConferenceData();
            data.data(confData);
            ApiSuccessResponse response = this.voiceApi.singleStepConference(connId, data);
            // Translate an unsuccessful API status into an exception.
            throwIfNotOk("singleStepConference", response);
        } catch (ApiException e) {
            throw new WorkspaceApiException("singleStepConference failed", e);
        }
    }
}
|
public class TriangleListing {
    /**
     * Emits the triangles of the input graph by generating candidate triplets
     * on the lower-degree endpoint of each edge and closing each wedge with a
     * join against the edge set.
     *
     * <p>Implementation notes:
     * The requirement that "K extends CopyableValue&lt;K&gt;" can be removed when
     * Flink has a self-join and GenerateTriplets is implemented as such.
     * ProjectTriangles should eventually be replaced by ".projectFirst("*")"
     * when projections use code generation.
     */
    @Override
    public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input) throws Exception {
        // u, v where u < v
        DataSet<Tuple2<K, K>> filteredByID = input.getEdges().flatMap(new FilterByID<>()).setParallelism(parallelism).name("Filter by ID");
        // u, v, (edge value, deg(u), deg(v))
        DataSet<Edge<K, Tuple3<EV, LongValue, LongValue>>> pairDegree = input.run(new EdgeDegreePair<K, VV, EV>().setParallelism(parallelism));
        // u, v where deg(u) < deg(v) or (deg(u) == deg(v) and u < v)
        DataSet<Tuple2<K, K>> filteredByDegree = pairDegree.flatMap(new FilterByDegree<>()).setParallelism(parallelism).name("Filter by degree");
        // u, v, w where (u, v) and (u, w) are edges in graph, v < w
        DataSet<Tuple3<K, K, K>> triplets = filteredByDegree.groupBy(0).sortGroup(1, Order.ASCENDING).reduceGroup(new GenerateTriplets<>()).name("Generate triplets");
        // u, v, w where (u, v), (u, w), and (v, w) are edges in graph, v < w
        // (closing the wedge: a triplet is a triangle iff edge (v, w) exists)
        DataSet<Result<K>> triangles = triplets.join(filteredByID, JoinOperatorBase.JoinHint.REPARTITION_HASH_SECOND).where(1, 2).equalTo(0, 1).with(new ProjectTriangles<>()).name("Triangle listing");
        if (permuteResults) {
            // Presumably emits every vertex ordering of each triangle.
            triangles = triangles.flatMap(new PermuteResult<>()).name("Permute triangle vertices");
        } else if (sortTriangleVertices.get()) {
            triangles = triangles.map(new SortTriangleVertices<>()).name("Sort triangle vertices");
        }
        return triangles;
    }
}
|
public class TransparentDataEncryptionsInner {
    /**
     * Creates or updates a database's transparent data encryption configuration.
     *
     * @param resourceGroupName the name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName the name of the server
     * @param databaseName the name of the database for which setting the transparent data encryption applies
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the TransparentDataEncryptionInner object if successful
     */
    public TransparentDataEncryptionInner createOrUpdate(String resourceGroupName, String serverName, String databaseName) {
        // Blocks on the async call and unwraps the single response body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, databaseName).toBlocking().single().body();
    }
}
|
public class LauncherUtils {
    /**
     * Creates the initial runtime config of the scheduler state manager adaptor.
     *
     * @param adaptor the scheduler state manager adaptor to expose via config
     * @return a config containing the adaptor under {@code Key.SCHEDULER_STATE_MANAGER_ADAPTOR}
     */
    public Config createAdaptorRuntime(SchedulerStateManagerAdaptor adaptor) {
        return Config.newBuilder().put(Key.SCHEDULER_STATE_MANAGER_ADAPTOR, adaptor).build();
    }
}
|
public class ProcessorInclude {
    /**
     * Set off a new parse for an included or imported stylesheet. This will
     * set the {@link StylesheetHandler} to a new state, and recurse in with
     * a new set of parse events. Once this function returns, the state of
     * the StylesheetHandler should be restored.
     *
     * @param handler non-null reference to current StylesheetHandler that is constructing the Templates
     * @param uri the Namespace URI, which should be the XSLT namespace
     * @param localName the local name (without prefix), which should be "include" or "import"
     * @param rawName the qualified name (with prefix)
     * @param attributes the list of attributes on the xsl:include or xsl:import element
     * @throws org.xml.sax.SAXException any SAX exception, possibly wrapping another exception
     */
    protected void parse(StylesheetHandler handler, String uri, String localName, String rawName, Attributes attributes) throws org.xml.sax.SAXException {
        TransformerFactoryImpl processor = handler.getStylesheetProcessor();
        URIResolver uriresolver = processor.getURIResolver();
        try {
            Source source = null;
            // The base identifier, an absolute URI that is associated with the
            // included/imported stylesheet module, is known in this method, so
            // this method does the pushing of the base ID onto the stack.
            if (null != uriresolver) {
                // There is a user provided URI resolver. At the startElement()
                // call we would have tried to obtain a Source from it, which we
                // now retrieve.
                source = handler.peekSourceFromURIResolver();
                if (null != source && source instanceof DOMSource) {
                    Node node = ((DOMSource) source).getNode();
                    // At the startElement() call we would have already pushed
                    // the system ID, obtained from either source.getSystemId(),
                    // if non-null, or from SystemIDResolver.getAbsoluteURI() as
                    // a backup, which we now retrieve.
                    String systemId = handler.peekImportURL();
                    // Push the absolute URI of the included/imported stylesheet
                    // module onto the stack.
                    if (systemId != null)
                        handler.pushBaseIndentifier(systemId);
                    TreeWalker walker = new TreeWalker(handler, new org.apache.xml.utils.DOM2Helper(), systemId);
                    try {
                        walker.traverse(node);
                    } catch (org.xml.sax.SAXException se) {
                        throw new TransformerException(se);
                    }
                    if (systemId != null)
                        handler.popBaseIndentifier();
                    // DOM sources are walked directly - no reader parse needed.
                    return;
                }
            }
            if (null == source) {
                // No resolver-supplied source: resolve href against the current base.
                String absURL = SystemIDResolver.getAbsoluteURI(getHref(), handler.getBaseIdentifier());
                source = new StreamSource(absURL);
            }
            // Possible callback to a class that over-rides this method.
            source = processSource(handler, source);
            XMLReader reader = null;
            if (source instanceof SAXSource) {
                SAXSource saxSource = (SAXSource) source;
                reader = saxSource.getXMLReader(); // may be null
            }
            InputSource inputSource = SAXSource.sourceToInputSource(source);
            if (null == reader) {
                // Use JAXP 1.1 (if possible) to obtain a namespace-aware reader.
                try {
                    javax.xml.parsers.SAXParserFactory factory = javax.xml.parsers.SAXParserFactory.newInstance();
                    factory.setNamespaceAware(true);
                    if (handler.getStylesheetProcessor().isSecureProcessing()) {
                        try {
                            factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
                        } catch (org.xml.sax.SAXException se) {
                            // Ignored: the parser does not support secure processing.
                        }
                    }
                    javax.xml.parsers.SAXParser jaxpParser = factory.newSAXParser();
                    reader = jaxpParser.getXMLReader();
                } catch (javax.xml.parsers.ParserConfigurationException ex) {
                    throw new org.xml.sax.SAXException(ex);
                } catch (javax.xml.parsers.FactoryConfigurationError ex1) {
                    throw new org.xml.sax.SAXException(ex1.toString());
                } catch (NoSuchMethodError ex2) {
                    // Pre-JAXP-1.1 environment: fall through to XMLReaderFactory.
                } catch (AbstractMethodError ame) {
                    // Incompatible JAXP implementation: fall through as well.
                }
            }
            if (null == reader)
                reader = XMLReaderFactory.createXMLReader();
            if (null != reader) {
                reader.setContentHandler(handler);
                // Push the absolute URI of the included/imported stylesheet
                // module onto the stack, and always pop it again afterwards.
                handler.pushBaseIndentifier(inputSource.getSystemId());
                try {
                    reader.parse(inputSource);
                } finally {
                    handler.popBaseIndentifier();
                }
            }
        } catch (IOException ioe) {
            handler.error(XSLTErrorResources.ER_IOEXCEPTION, new Object[] { getHref() }, ioe);
        } catch (TransformerException te) {
            handler.error(te.getMessage(), te);
        }
    }
}
|
public class BatchingEntityLoaderBuilder { /** * Builds a batch - fetch capable loader based on the given persister , lock - mode , etc .
* @ param persister The entity persister
* @ param batchSize The maximum number of ids to batch - fetch at once
* @ param lockMode The lock mode
* @ param factory The SessionFactory
* @ param influencers Any influencers that should affect the built query
* @ param innerEntityLoaderBuilder Builder of the entity loader receiving the subset of batches
* @ return The loader . */
public UniqueEntityLoader buildLoader ( OuterJoinLoadable persister , int batchSize , LockMode lockMode , SessionFactoryImplementor factory , LoadQueryInfluencers influencers , BatchableEntityLoaderBuilder innerEntityLoaderBuilder ) { } }
|
if ( batchSize <= 1 ) { // no batching
return buildNonBatchingLoader ( persister , lockMode , factory , influencers , innerEntityLoaderBuilder ) ; } return buildBatchingLoader ( persister , batchSize , lockMode , factory , influencers , innerEntityLoaderBuilder ) ;
|
public class Representation { /** * Returns an { @ code InputStream } over the binary data of this representation object .
* Conversion from character to byte data , if required , is performed according to the charset
* specified by the MIME type metadata property ( { @ link NIE # MIME _ TYPE } ) .
* @ return an { @ code InputStream } over the binary content of this representation */
public InputStream getInputStream ( ) { } }
|
if ( this . data instanceof InputStream ) { return ( InputStream ) this . data ; } else { final Reader reader = ( Reader ) this . data ; return new ReaderInputStream ( reader , getCharset ( ) ) ; }
|
public class PackageInfo { /** * Get the { @ link ClassInfo } objects within this package recursively .
* @ param reachableClassInfo
* the reachable class info */
private void obtainClassInfoRecursive ( final Set < ClassInfo > reachableClassInfo ) { } }
|
reachableClassInfo . addAll ( memberClassNameToClassInfo . values ( ) ) ; for ( final PackageInfo subPackageInfo : getChildren ( ) ) { subPackageInfo . obtainClassInfoRecursive ( reachableClassInfo ) ; }
|
public class Executable {
    /**
     * Used to invoke the executable asynchronously.
     *
     * <p>Runs {@code execute()} and publishes its result into {@code retval}
     * unless the task was aborted in the meantime. An InterruptedException
     * leaves the state untouched (neither result nor {@code finished} is set);
     * any other throwable is recorded in {@code execException} and still marks
     * the task finished.
     */
    public void run() {
        try {
            Object result = execute();
            if (!aborted) {
                // Only publish when nobody aborted us while we were running.
                this.retval = result;
                finished = true;
            }
        } catch (InterruptedException ie) {
            // Deliberately swallowed: interruption appears to be the
            // cooperative-abort path, so the task is not marked finished.
            // NOTE(review): the interrupt flag is not restored - confirm intended.
        } catch (Throwable t) {
            execException = t;
            finished = true;
        }
    }
}
|
public class DirectedGraph { /** * Report ( as a Map ) the out - degree ( the number of tail ends adjacent to a vertex ) of each vertex . */
public Map < V , Integer > outDegree ( ) { } }
|
Map < V , Integer > result = new HashMap < > ( ) ; for ( V vertex : neighbors . keySet ( ) ) { result . put ( vertex , neighbors . get ( vertex ) . size ( ) ) ; } return result ;
|
public class SchemaHelper {
    /**
     * Extracts the value of a specified parameter in a schema.
     *
     * <p>Only top-level matches are considered: any match located at or after
     * the next "properties" section (which would belong to a nested definition)
     * is ignored.
     *
     * @param searchString element to search for
     * @param schema schema as a string
     * @param startIdx index to start searching from
     * @return the value or null if not found
     */
    private static String extractTopItem(String searchString, String schema, int startIdx) {
        String extracted = null;
        // Position of the next "properties" block; matches at or beyond it are
        // nested and must not be used (MAX_VALUE when absent, so nothing is cut off).
        int propIdx = schema.indexOf("\"properties\"", startIdx);
        if (propIdx == - 1) {
            propIdx = Integer.MAX_VALUE;
        }
        // check for second occurrence of the quoted search string
        int idIdx = schema.indexOf("\"" + searchString + "\"", startIdx);
        int secondIdIdx = schema.indexOf("\"" + searchString + "\"", idIdx + 1);
        if (secondIdIdx != - 1 && propIdx > secondIdIdx) {
            // NOTE(review): a second top-level occurrence is preferred over the
            // first - presumably to skip a self-reference; confirm intent.
            idIdx = secondIdIdx;
        }
        if (idIdx != - 1 && propIdx > idIdx) {
            // make sure we're not in a nested id:
            // find the 1st and 2nd quote after the match and take the text between.
            int valueStartIdx = schema.indexOf("\"", idIdx + (searchString.length() + 2));
            int valueEndIdx = schema.indexOf("\"", valueStartIdx + 1);
            extracted = schema.substring(valueStartIdx + 1, valueEndIdx);
        }
        return extracted;
    }
}
|
public class SlingApi {
    /**
     * Executes the query call asynchronously.
     *
     * @param path (required)
     * @param pLimit (required)
     * @param _1Property (required)
     * @param _1PropertyValue (required)
     * @param callback the callback to be executed when the API call finishes
     * @return the request call
     * @throws ApiException if fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call postQueryAsync(String path, BigDecimal pLimit, String _1Property, String _1PropertyValue, final ApiCallback<String> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            // Bridge okhttp progress events to the user-supplied callback.
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        // postQueryValidateBeforeCall presumably validates the required
        // parameters before building the HTTP call - confirm in its source.
        com.squareup.okhttp.Call call = postQueryValidateBeforeCall(path, pLimit, _1Property, _1PropertyValue, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<String>() { }.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
}
|
public class ModuleImportResolver {
    /**
     * Returns the corresponding scope root Node from a goog.module.
     *
     * @param module the module; NOTE(review): declared @Nullable, yet
     *        {@code module.metadata()} below dereferences it unconditionally -
     *        confirm callers never actually pass null
     * @return the scope root, or null when it cannot be determined
     */
    @Nullable
    private Node getGoogModuleScopeRoot(@Nullable Module module) {
        checkArgument(module.metadata().isGoogModule(), module.metadata());
        Node scriptNode = module.metadata().rootNode();
        if (scriptNode.isScript() && scriptNode.hasOneChild() && scriptNode.getOnlyChild().isModuleBody()) {
            // The module root node should be a SCRIPT, whose first child is a
            // MODULE_BODY. The map is keyed off a MODULE_BODY node for a
            // goog.module, which is the only child of our SCRIPT node.
            return scriptNode.getOnlyChild();
        } else if (scriptNode.isCall()) {
            // This is a goog.loadModule call, and the scope is keyed off the
            // FUNCTION node's BLOCK in: goog.loadModule(function(exports) {...})
            Node functionLiteral = scriptNode.getSecondChild();
            return NodeUtil.getFunctionBody(functionLiteral);
        }
        // TODO(b/124919359): this case should not happen, but is triggering on
        // goog.require calls in rewritten modules with preserveClosurePrimitives enabled.
        return null;
    }
}
|
public class URLConnection { /** * Looks for a content handler in a user - defineable set of places .
* By default it looks in sun . net . www . content , but users can define a
* vertical - bar delimited set of class prefixes to search through in
* addition by defining the java . content . handler . pkgs property .
* The class name must be of the form :
* < pre >
* { package - prefix } . { major } . { minor }
* e . g .
* YoyoDyne . experimental . text . plain
* < / pre > */
private ContentHandler lookupContentHandlerClassFor ( String contentType ) throws InstantiationException , IllegalAccessException , ClassNotFoundException { } }
|
String contentHandlerClassName = typeToPackageName ( contentType ) ; String contentHandlerPkgPrefixes = getContentHandlerPkgPrefixes ( ) ; StringTokenizer packagePrefixIter = new StringTokenizer ( contentHandlerPkgPrefixes , "|" ) ; while ( packagePrefixIter . hasMoreTokens ( ) ) { String packagePrefix = packagePrefixIter . nextToken ( ) . trim ( ) ; try { String clsName = packagePrefix + "." + contentHandlerClassName ; Class cls = null ; try { cls = Class . forName ( clsName ) ; } catch ( ClassNotFoundException e ) { ClassLoader cl = ClassLoader . getSystemClassLoader ( ) ; if ( cl != null ) { cls = cl . loadClass ( clsName ) ; } } if ( cls != null ) { ContentHandler handler = ( ContentHandler ) cls . newInstance ( ) ; return handler ; } } catch ( Exception e ) { } } return UnknownContentHandler . INSTANCE ;
|
public class ChangesetsDao { /** * Get a number of changesets that match the given filters .
* @ param handler The handler which is fed the incoming changeset infos
* @ param filters what to search for . I . e .
* new QueryChangesetsFilters ( ) . byUser ( 123 ) . onlyClosed ( )
* @ throws OsmAuthorizationException if not logged in */
public void find ( Handler < ChangesetInfo > handler , QueryChangesetsFilters filters ) { } }
|
String query = filters != null ? "?" + filters . toParamString ( ) : "" ; try { osm . makeAuthenticatedRequest ( CHANGESET + "s" + query , null , new ChangesetParser ( handler ) ) ; } catch ( OsmNotFoundException e ) { // ok , we are done ( ignore the exception )
}
|
public class WikipediaXMLReader { /** * Creates and initializes the xml keyword tree . */
private void initXMLKeys ( ) { } }
|
this . keywords = new SingleKeywordTree < WikipediaXMLKeys > ( ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_PAGE . getKeyword ( ) , WikipediaXMLKeys . KEY_START_PAGE ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_PAGE . getKeyword ( ) , WikipediaXMLKeys . KEY_END_PAGE ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_TITLE . getKeyword ( ) , WikipediaXMLKeys . KEY_START_TITLE ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_TITLE . getKeyword ( ) , WikipediaXMLKeys . KEY_END_TITLE ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_ID . getKeyword ( ) , WikipediaXMLKeys . KEY_START_ID ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_ID . getKeyword ( ) , WikipediaXMLKeys . KEY_END_ID ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_REVISION . getKeyword ( ) , WikipediaXMLKeys . KEY_START_REVISION ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_REVISION . getKeyword ( ) , WikipediaXMLKeys . KEY_END_REVISION ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_TIMESTAMP . getKeyword ( ) , WikipediaXMLKeys . KEY_START_TIMESTAMP ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_TIMESTAMP . getKeyword ( ) , WikipediaXMLKeys . KEY_END_TIMESTAMP ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_TEXT . getKeyword ( ) , WikipediaXMLKeys . KEY_START_TEXT ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_TEXT . getKeyword ( ) , WikipediaXMLKeys . KEY_END_TEXT ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_MINOR_FLAG . getKeyword ( ) , WikipediaXMLKeys . KEY_MINOR_FLAG ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_COMMENT . getKeyword ( ) , WikipediaXMLKeys . KEY_START_COMMENT ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_COMMENT . getKeyword ( ) , WikipediaXMLKeys . KEY_END_COMMENT ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_IP . getKeyword ( ) , WikipediaXMLKeys . KEY_START_IP ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_IP . 
getKeyword ( ) , WikipediaXMLKeys . KEY_END_IP ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_USERNAME . getKeyword ( ) , WikipediaXMLKeys . KEY_START_USERNAME ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_USERNAME . getKeyword ( ) , WikipediaXMLKeys . KEY_END_USERNAME ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_CONTRIBUTOR . getKeyword ( ) , WikipediaXMLKeys . KEY_START_CONTRIBUTOR ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_CONTRIBUTOR . getKeyword ( ) , WikipediaXMLKeys . KEY_END_CONTRIBUTOR ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_START_NAMESPACES . getKeyword ( ) , WikipediaXMLKeys . KEY_START_NAMESPACES ) ; keywords . addKeyword ( WikipediaXMLKeys . KEY_END_NAMESPACES . getKeyword ( ) , WikipediaXMLKeys . KEY_END_NAMESPACES ) ;
|
public class CtClassUtil { /** * 获取方法的参数名称 .
* @ param ctMethod
* @ return
* @ throws NotFoundException */
public static String [ ] getParameterNames ( CtMethod ctMethod ) throws NotFoundException { } }
|
MethodInfo methodInfo = ctMethod . getMethodInfo ( ) ; CodeAttribute codeAttribute = methodInfo . getCodeAttribute ( ) ; // logger . info ( " methodInfo . getConstPool ( ) . getSize ( ) : " ) ;
LocalVariableAttribute attribute = ( LocalVariableAttribute ) codeAttribute . getAttribute ( LocalVariableAttribute . tag ) ; // String [ ] names = new String [ attribute . tableLength ( ) - 1 ] ;
String [ ] paramNames = new String [ ctMethod . getParameterTypes ( ) . length ] ; int pos = 0 ; if ( true ) { int size = attribute . tableLength ( ) ; if ( size > 0 ) { String [ ] names = new String [ size - 1 ] ; for ( int i = 0 ; i < names . length ; i ++ ) { names [ i ] = attribute . variableName ( i ) ; if ( "this" . equals ( names [ i ] ) ) { pos = i + 1 ; break ; } } // logger . info ( methodInfo . getName ( ) + " pos : " + pos + " allNames : " + StringUtils . join ( names , " , " ) ) ;
} } // logger . info ( methodInfo . getName ( ) + " pos : " + pos ) ;
for ( int i = 0 ; i < paramNames . length ; i ++ ) { // paramNames [ i ] = attribute . variableName ( i + 1 ) ;
try { paramNames [ i ] = attribute . variableName ( i + pos ) ; // logger . info ( " paramNames [ " + i + " ] : " + paramNames [ i ] ) ;
} catch ( RuntimeException e ) { throw e ; } } // System . err . println ( " paramNames : " + StringUtils . join ( paramNames ) ) ;
return paramNames ;
|
public class CUdevice_attribute { /** * Returns the String identifying the given CUdevice _ attribute
* @ param n The CUdevice _ attribute
* @ return The String identifying the given CUdevice _ attribute */
public static String stringFor ( int n ) { } }
|
switch ( n ) { case CU_DEVICE_ATTRIBUTE_MAX_THREADS_PER_BLOCK : return "CU_DEVICE_ATTRIBUTE_MAX_THREADS_PER_BLOCK" ; case CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_X : return "CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_X" ; case CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_Y : return "CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_Y" ; case CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_Z : return "CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_Z" ; case CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_X : return "CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_X" ; case CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_Y : return "CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_Y" ; case CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_Z : return "CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_Z" ; case CU_DEVICE_ATTRIBUTE_MAX_SHARED_MEMORY_PER_BLOCK : return "CU_DEVICE_ATTRIBUTE_MAX_SHARED_MEMORY_PER_BLOCK" ; case CU_DEVICE_ATTRIBUTE_TOTAL_CONSTANT_MEMORY : return "CU_DEVICE_ATTRIBUTE_TOTAL_CONSTANT_MEMORY" ; case CU_DEVICE_ATTRIBUTE_WARP_SIZE : return "CU_DEVICE_ATTRIBUTE_WARP_SIZE" ; case CU_DEVICE_ATTRIBUTE_MAX_PITCH : return "CU_DEVICE_ATTRIBUTE_MAX_PITCH" ; case CU_DEVICE_ATTRIBUTE_MAX_REGISTERS_PER_BLOCK : return "CU_DEVICE_ATTRIBUTE_MAX_REGISTERS_PER_BLOCK" ; case CU_DEVICE_ATTRIBUTE_CLOCK_RATE : return "CU_DEVICE_ATTRIBUTE_CLOCK_RATE" ; case CU_DEVICE_ATTRIBUTE_TEXTURE_ALIGNMENT : return "CU_DEVICE_ATTRIBUTE_TEXTURE_ALIGNMENT" ; case CU_DEVICE_ATTRIBUTE_GPU_OVERLAP : return "CU_DEVICE_ATTRIBUTE_GPU_OVERLAP" ; case CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT : return "CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT" ; case CU_DEVICE_ATTRIBUTE_KERNEL_EXEC_TIMEOUT : return "CU_DEVICE_ATTRIBUTE_KERNEL_EXEC_TIMEOUT" ; case CU_DEVICE_ATTRIBUTE_INTEGRATED : return "CU_DEVICE_ATTRIBUTE_INTEGRATED" ; case CU_DEVICE_ATTRIBUTE_CAN_MAP_HOST_MEMORY : return "CU_DEVICE_ATTRIBUTE_CAN_MAP_HOST_MEMORY" ; case CU_DEVICE_ATTRIBUTE_COMPUTE_MODE : return "CU_DEVICE_ATTRIBUTE_COMPUTE_MODE" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_WIDTH 
: return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_DEPTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_DEPTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_LAYERS : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_LAYERS" ; case CU_DEVICE_ATTRIBUTE_SURFACE_ALIGNMENT : return "CU_DEVICE_ATTRIBUTE_SURFACE_ALIGNMENT" ; case CU_DEVICE_ATTRIBUTE_CONCURRENT_KERNELS : return "CU_DEVICE_ATTRIBUTE_CONCURRENT_KERNELS" ; case CU_DEVICE_ATTRIBUTE_ECC_ENABLED : return "CU_DEVICE_ATTRIBUTE_ECC_ENABLED" ; case CU_DEVICE_ATTRIBUTE_PCI_BUS_ID : return "CU_DEVICE_ATTRIBUTE_PCI_BUS_ID" ; case CU_DEVICE_ATTRIBUTE_PCI_DEVICE_ID : return "CU_DEVICE_ATTRIBUTE_PCI_DEVICE_ID" ; case CU_DEVICE_ATTRIBUTE_TCC_DRIVER : return "CU_DEVICE_ATTRIBUTE_PCI_DEVICE_ID" ; case CU_DEVICE_ATTRIBUTE_MEMORY_CLOCK_RATE : return "CU_DEVICE_ATTRIBUTE_MEMORY_CLOCK_RATE" ; case CU_DEVICE_ATTRIBUTE_GLOBAL_MEMORY_BUS_WIDTH : return "CU_DEVICE_ATTRIBUTE_GLOBAL_MEMORY_BUS_WIDTH" ; case CU_DEVICE_ATTRIBUTE_L2_CACHE_SIZE : return "CU_DEVICE_ATTRIBUTE_L2_CACHE_SIZE" ; case CU_DEVICE_ATTRIBUTE_MAX_THREADS_PER_MULTIPROCESSOR : return "CU_DEVICE_ATTRIBUTE_MAX_THREADS_PER_MULTIPROCESSOR" ; case CU_DEVICE_ATTRIBUTE_ASYNC_ENGINE_COUNT : return "CU_DEVICE_ATTRIBUTE_ASYNC_ENGINE_COUNT" ; case CU_DEVICE_ATTRIBUTE_UNIFIED_ADDRESSING : return "CU_DEVICE_ATTRIBUTE_UNIFIED_ADDRESSING" ; case 
CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LAYERED_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LAYERED_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LAYERED_LAYERS : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LAYERED_LAYERS" ; case CU_DEVICE_ATTRIBUTE_CAN_TEX2D_GATHER : return "CU_DEVICE_ATTRIBUTE_CAN_TEX2D_GATHER" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_GATHER_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_GATHER_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_GATHER_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_GATHER_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_WIDTH_ALTERNATE : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_WIDTH_ALTERNATE" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_HEIGHT_ALTERNATE : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_HEIGHT_ALTERNATE" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_DEPTH_ALTERNATE : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_DEPTH_ALTERNATE" ; case CU_DEVICE_ATTRIBUTE_PCI_DOMAIN_ID : return "CU_DEVICE_ATTRIBUTE_PCI_DOMAIN_ID" ; case CU_DEVICE_ATTRIBUTE_TEXTURE_PITCH_ALIGNMENT : return "CU_DEVICE_ATTRIBUTE_TEXTURE_PITCH_ALIGNMENT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_LAYERED_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_LAYERED_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_LAYERED_LAYERS : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_LAYERED_LAYERS" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_WIDTH" ; case 
CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_DEPTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_DEPTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_LAYERED_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_LAYERED_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_LAYERED_LAYERS : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_LAYERED_LAYERS" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_LAYERS : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_LAYERS" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_LAYERED_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_LAYERED_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_LAYERED_LAYERS : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_LAYERED_LAYERS" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LINEAR_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LINEAR_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_PITCH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_PITCH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_MIPMAPPED_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_MIPMAPPED_WIDTH" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_MIPMAPPED_HEIGHT : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_MIPMAPPED_HEIGHT" ; case CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MAJOR : 
return "CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MAJOR" ; case CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MINOR : return "CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MINOR" ; case CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_MIPMAPPED_WIDTH : return "CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_MIPMAPPED_WIDTH" ; case CU_DEVICE_ATTRIBUTE_STREAM_PRIORITIES_SUPPORTED : return "CU_DEVICE_ATTRIBUTE_STREAM_PRIORITIES_SUPPORTED" ; case CU_DEVICE_ATTRIBUTE_GLOBAL_L1_CACHE_SUPPORTED : return "CU_DEVICE_ATTRIBUTE_GLOBAL_L1_CACHE_SUPPORTED" ; case CU_DEVICE_ATTRIBUTE_LOCAL_L1_CACHE_SUPPORTED : return "CU_DEVICE_ATTRIBUTE_LOCAL_L1_CACHE_SUPPORTED" ; case CU_DEVICE_ATTRIBUTE_MAX_SHARED_MEMORY_PER_MULTIPROCESSOR : return "CU_DEVICE_ATTRIBUTE_MAX_SHARED_MEMORY_PER_MULTIPROCESSOR" ; case CU_DEVICE_ATTRIBUTE_MAX_REGISTERS_PER_MULTIPROCESSOR : return "CU_DEVICE_ATTRIBUTE_MAX_REGISTERS_PER_MULTIPROCESSOR" ; case CU_DEVICE_ATTRIBUTE_MANAGED_MEMORY : return "CU_DEVICE_ATTRIBUTE_MANAGED_MEMORY" ; case CU_DEVICE_ATTRIBUTE_MULTI_GPU_BOARD : return "CU_DEVICE_ATTRIBUTE_MULTI_GPU_BOARD" ; case CU_DEVICE_ATTRIBUTE_MULTI_GPU_BOARD_GROUP_ID : return "CU_DEVICE_ATTRIBUTE_MULTI_GPU_BOARD_GROUP_ID" ; case CU_DEVICE_ATTRIBUTE_HOST_NATIVE_ATOMIC_SUPPORTED : return "CU_DEVICE_ATTRIBUTE_HOST_NATIVE_ATOMIC_SUPPORTED" ; case CU_DEVICE_ATTRIBUTE_SINGLE_TO_DOUBLE_PRECISION_PERF_RATIO : return "CU_DEVICE_ATTRIBUTE_SINGLE_TO_DOUBLE_PRECISION_PERF_RATIO" ; case CU_DEVICE_ATTRIBUTE_PAGEABLE_MEMORY_ACCESS : return "CU_DEVICE_ATTRIBUTE_PAGEABLE_MEMORY_ACCESS" ; case CU_DEVICE_ATTRIBUTE_CONCURRENT_MANAGED_ACCESS : return "CU_DEVICE_ATTRIBUTE_CONCURRENT_MANAGED_ACCESS" ; case CU_DEVICE_ATTRIBUTE_COMPUTE_PREEMPTION_SUPPORTED : return "CU_DEVICE_ATTRIBUTE_COMPUTE_PREEMPTION_SUPPORTED" ; case CU_DEVICE_ATTRIBUTE_CAN_USE_HOST_POINTER_FOR_REGISTERED_MEM : return "CU_DEVICE_ATTRIBUTE_CAN_USE_HOST_POINTER_FOR_REGISTERED_MEM" ; case CU_DEVICE_ATTRIBUTE_CAN_USE_STREAM_MEM_OPS : return "CU_DEVICE_ATTRIBUTE_CAN_USE_STREAM_MEM_OPS" ; case 
CU_DEVICE_ATTRIBUTE_CAN_USE_64_BIT_STREAM_MEM_OPS : return "CU_DEVICE_ATTRIBUTE_CAN_USE_64_BIT_STREAM_MEM_OPS" ; case CU_DEVICE_ATTRIBUTE_CAN_USE_STREAM_WAIT_VALUE_NOR : return "CU_DEVICE_ATTRIBUTE_CAN_USE_STREAM_WAIT_VALUE_NOR" ; case CU_DEVICE_ATTRIBUTE_COOPERATIVE_LAUNCH : return "CU_DEVICE_ATTRIBUTE_COOPERATIVE_LAUNCH" ; case CU_DEVICE_ATTRIBUTE_COOPERATIVE_MULTI_DEVICE_LAUNCH : return "CU_DEVICE_ATTRIBUTE_COOPERATIVE_MULTI_DEVICE_LAUNCH" ; case CU_DEVICE_ATTRIBUTE_MAX_SHARED_MEMORY_PER_BLOCK_OPTIN : return "CU_DEVICE_ATTRIBUTE_MAX_SHARED_MEMORY_PER_BLOCK_OPTIN" ; case CU_DEVICE_ATTRIBUTE_CAN_FLUSH_REMOTE_WRITES : return "CU_DEVICE_ATTRIBUTE_CAN_FLUSH_REMOTE_WRITES" ; case CU_DEVICE_ATTRIBUTE_HOST_REGISTER_SUPPORTED : return "CU_DEVICE_ATTRIBUTE_HOST_REGISTER_SUPPORTED" ; case CU_DEVICE_ATTRIBUTE_PAGEABLE_MEMORY_ACCESS_USES_HOST_PAGE_TABLES : return "CU_DEVICE_ATTRIBUTE_PAGEABLE_MEMORY_ACCESS_USES_HOST_PAGE_TABLES" ; case CU_DEVICE_ATTRIBUTE_DIRECT_MANAGED_MEM_ACCESS_FROM_HOST : return "CU_DEVICE_ATTRIBUTE_DIRECT_MANAGED_MEM_ACCESS_FROM_HOST" ; } return "INVALID CUdevice_attribute: " + n ;
|
public class GoogleMapsTileMath { /** * Converts given coordinate in WGS84 Datum to XY in Spherical Mercator
* EPSG : 3857
* @ param lng the longitude of the coordinate
* @ param lat the latitude of the coordinate
* @ return The coordinate transformed to EPSG : 3857 */
public Coordinate lngLatToMeters ( double lng , double lat ) { } }
|
double mx = lng * originShift / 180.0 ; double my = Math . log ( Math . tan ( ( 90 + lat ) * Math . PI / 360.0 ) ) / ( Math . PI / 180.0 ) ; my *= originShift / 180.0 ; return new Coordinate ( mx , my ) ;
|
public class Record { /** * The end key position is in this record... Save it!
 * Builds the end-of-file select key: saves the key area's modified/nullable state,
 * stashes the current key bytes, fills the key fields with the END_SELECT_KEY
 * sentinel, lets the next enabled listener (or this record) process the end key,
 * then restores everything. */
public void handleEndKey ( ) { } }

// Key area index -1: presumably the current/default key area — TODO confirm.
KeyArea keyArea = this . getKeyArea ( - 1 ) ;
if ( keyArea == null ) return ;
BaseBuffer buffer = new VectorBuffer ( null ) ;
// Remember the current state so it can be restored after the sentinel key is written.
boolean [ ] rgbModified = keyArea . getModified ( ) ;
boolean [ ] rgbNullable = keyArea . setNullable ( true ) ;
// Save the current key bytes into a scratch buffer for later restoration.
keyArea . setupKeyBuffer ( buffer , DBConstants . FILE_KEY_AREA ) ;
keyArea . zeroKeyFields ( DBConstants . END_SELECT_KEY ) ; // Set the key fields to a large value
// Delegate handling to the next enabled listener if there is one; otherwise handle it here.
BaseListener nextListener = this . getNextEnabledListener ( ) ;
if ( nextListener != null ) ( ( FileListener ) nextListener ) . doEndKey ( ) ; else this . doEndKey ( ) ;
// Restore the saved nullable/modified state and the original key bytes.
keyArea . setNullable ( rgbNullable ) ;
keyArea . setModified ( rgbModified ) ;
keyArea . reverseKeyBuffer ( buffer , DBConstants . FILE_KEY_AREA ) ;
|
public class KuromojiCSVUtil { /** * Quote and escape input value for CSV
* @ param original Original text .
* @ return Escaped text . */
public static String quoteEscape ( final String original ) { } }
|
String result = original ; if ( result . indexOf ( '\"' ) >= 0 ) { result = result . replace ( "\"" , ESCAPED_QUOTE ) ; } if ( result . indexOf ( COMMA ) >= 0 ) { result = "\"" + result + "\"" ; } return result ;
|
public class OGNL { /** * 判断条件是 and 还是 or
* @ param parameter
* @ return */
public static String andOr ( Object parameter ) { } }
|
if ( parameter instanceof Example . Criteria ) { return ( ( Example . Criteria ) parameter ) . getAndOr ( ) ; } else if ( parameter instanceof Example . Criterion ) { return ( ( Example . Criterion ) parameter ) . getAndOr ( ) ; } else if ( parameter . getClass ( ) . getCanonicalName ( ) . endsWith ( "Criteria" ) ) { return "or" ; } else { return "and" ; }
|
public class ListMultimap { /** * Gets the number of values in the map .
* @ return the number of values */
public int size ( ) { } }
|
int size = 0 ; for ( List < V > value : map . values ( ) ) { size += value . size ( ) ; } return size ;
|
public class BaseSynthesizer { /** * Returns the { @ link Dictionary } used for this synthesizer .
 * The dictionary file can be defined in the { @ link # BaseSynthesizer ( String , String ) constructor } .
 * Lazily loads the dictionary on first use via double-checked locking.
 * NOTE(review): correctness of the unsynchronized first read assumes the
 * { @ code dictionary } field is declared volatile — confirm in the field declaration.
 * @ throws IOException In case the dictionary cannot be loaded . */
protected Dictionary getDictionary ( ) throws IOException { } }

// Read the field once into a local so the fast path does a single field access.
Dictionary dict = this . dictionary ;
if ( dict == null ) {
    synchronized ( this ) {
        // Re-check under the lock: another thread may have loaded it meanwhile.
        dict = this . dictionary ;
        if ( dict == null ) {
            // Resolve the dictionary resource configured for this synthesizer and load it.
            URL url = JLanguageTool . getDataBroker ( ) . getFromResourceDirAsUrl ( resourceFileName ) ;
            this . dictionary = dict = Dictionary . read ( url ) ;
        }
    }
}
return dict ;
|
public class Get {
    /**
     * Validates that the value from {@code map} for the given {@code key} is
     * present. Returns the value when present; otherwise, throws a
     * {@code NoSuchElementException}.
     *
     * @param map a map
     * @param key a key
     * @param <T> the type of value
     * @return the value mapped to the key
     * @throws java.util.NoSuchElementException if the required value is not present
     */
    public static <T> T required(Map<String, T> map, String key) {
        // Guard-clause form: succeed fast when the key exists.
        if (map.containsKey(key)) {
            return map.get(key);
        }
        throw new NoSuchElementException(
                String.format("Missing required value for key \"%s\".", key));
    }
}
|
public class StatsAgent { /** * STARVATION */
private VoltTable [ ] collectManagementStats ( boolean interval ) { } }
|
VoltTable [ ] mStats = collectStats ( StatsSelector . MEMORY , interval ) ; VoltTable [ ] iStats = collectStats ( StatsSelector . INITIATOR , interval ) ; VoltTable [ ] pStats = collectStats ( StatsSelector . PROCEDURE , interval ) ; VoltTable [ ] ioStats = collectStats ( StatsSelector . IOSTATS , interval ) ; VoltTable [ ] tStats = collectStats ( StatsSelector . TABLE , interval ) ; VoltTable [ ] indStats = collectStats ( StatsSelector . INDEX , interval ) ; VoltTable [ ] sStats = collectStats ( StatsSelector . STARVATION , interval ) ; VoltTable [ ] qStats = collectStats ( StatsSelector . QUEUE , interval ) ; VoltTable [ ] cStats = collectStats ( StatsSelector . CPU , interval ) ; // Ugh , this is ugly . Currently need to return null if
// we ' re missing any of the tables so that we
// don ' t screw up the aggregation in handleStatsResponse ( see my rant there )
if ( mStats == null || iStats == null || pStats == null || ioStats == null || tStats == null || indStats == null || sStats == null || qStats == null || cStats == null ) { return null ; } VoltTable [ ] stats = new VoltTable [ 9 ] ; stats [ 0 ] = mStats [ 0 ] ; stats [ 1 ] = iStats [ 0 ] ; stats [ 2 ] = pStats [ 0 ] ; stats [ 3 ] = ioStats [ 0 ] ; stats [ 4 ] = tStats [ 0 ] ; stats [ 5 ] = indStats [ 0 ] ; stats [ 6 ] = sStats [ 0 ] ; stats [ 7 ] = cStats [ 0 ] ; stats [ 8 ] = qStats [ 0 ] ; return stats ;
|
public class AssetsApi { /** * Get character asset locations ( asynchronously ) Return locations for a set
* of item ids , which you can get from character assets endpoint .
* Coordinates for items in hangars or stations are set to ( 0,0,0 ) - - - SSO
* Scope : esi - assets . read _ assets . v1
* @ param characterId
* An EVE character ID ( required )
* @ param requestBody
* A list of item ids ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param token
* Access token to use if unable to set a header ( optional )
* @ param callback
* The callback to be executed when the API call finishes
* @ return The request call
* @ throws ApiException
* If fail to process the API call , e . g . serializing the request
* body object */
public com . squareup . okhttp . Call postCharactersCharacterIdAssetsLocationsAsync ( Integer characterId , List < Long > requestBody , String datasource , String token , final ApiCallback < List < CharacterAssetsLocationsResponse > > callback ) throws ApiException { } }
|
com . squareup . okhttp . Call call = postCharactersCharacterIdAssetsLocationsValidateBeforeCall ( characterId , requestBody , datasource , token , callback ) ; Type localVarReturnType = new TypeToken < List < CharacterAssetsLocationsResponse > > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
|
public class Unmarshaller { /** * Unmarshals the embedded field represented by the given embedded metadata .
* @ param embeddedMetadata
* the embedded metadata
* @ param target
* the target object that needs to be updated
* @ throws Throwable
* propagated */
private void unmarshalWithExplodedStrategy ( EmbeddedMetadata embeddedMetadata , Object target ) throws Throwable { } }
|
Object embeddedObject = initializeEmbedded ( embeddedMetadata , target ) ; for ( PropertyMetadata propertyMetadata : embeddedMetadata . getPropertyMetadataCollection ( ) ) { unmarshalProperty ( propertyMetadata , embeddedObject ) ; } for ( EmbeddedMetadata embeddedMetadata2 : embeddedMetadata . getEmbeddedMetadataCollection ( ) ) { unmarshalWithExplodedStrategy ( embeddedMetadata2 , embeddedObject ) ; } ConstructorMetadata constructorMetadata = embeddedMetadata . getConstructorMetadata ( ) ; if ( constructorMetadata . isBuilderConstructionStrategy ( ) ) { embeddedObject = constructorMetadata . getBuildMethodHandle ( ) . invoke ( embeddedObject ) ; } embeddedMetadata . getWriteMethod ( ) . invoke ( target , embeddedObject ) ;
|
public class CmsAttributeHandler { /** * Inserts an entity value after the given reference . < p >
 * Adds the value to the entity model (appending when the reference is the last
 * element, inserting otherwise), reuses the reference widget when it is empty or
 * creates a new value view, and finally wires up handlers and renders the value .
 * @ param value the entity value
 * @ param reference the reference */
private void insertValueAfterReference ( CmsEntity value , CmsAttributeValueView reference ) { } }

// -1 signals "appended at the end" (no explicit index) throughout this method.
int valueIndex = - 1 ;
if ( reference . getElement ( ) . getNextSiblingElement ( ) == null ) {
    // Reference is the last element: append the value to the entity.
    m_entity . addAttributeValue ( m_attributeName , value ) ;
} else {
    // Insert directly after the reference position.
    valueIndex = reference . getValueIndex ( ) + 1 ;
    m_entity . insertAttributeValue ( m_attributeName , value , valueIndex ) ;
}
// Reuse the reference widget when it is still empty; otherwise build a fresh view.
CmsAttributeValueView valueWidget = reference ;
if ( reference . hasValue ( ) ) {
    valueWidget = new CmsAttributeValueView ( this ,
        m_widgetService . getAttributeLabel ( m_attributeName ) ,
        m_widgetService . getAttributeHelp ( m_attributeName ) ) ;
    CmsRenderer . setAttributeChoice ( m_widgetService , valueWidget , getAttributeType ( ) ) ;
    if ( valueIndex == - 1 ) {
        // Appended: add the widget at the end of the parent panel and the view list.
        ( ( FlowPanel ) reference . getParent ( ) ) . add ( valueWidget ) ;
        m_attributeValueViews . remove ( valueWidget ) ;
        m_attributeValueViews . add ( valueWidget ) ;
    } else {
        // Inserted: place the widget at the same index in panel and view list,
        // and record the changed ordering.
        ( ( FlowPanel ) reference . getParent ( ) ) . insert ( valueWidget , valueIndex ) ;
        m_attributeValueViews . remove ( valueWidget ) ;
        m_attributeValueViews . add ( valueIndex , valueWidget ) ;
        m_widgetService . addChangedOrderPath ( getSimplePath ( - 1 ) ) ;
    }
}
// Attach handlers at the widget's final index and render the new value.
valueIndex = valueWidget . getValueIndex ( ) ;
insertHandlers ( valueIndex ) ;
I_CmsEntityRenderer renderer = m_widgetService . getRendererForAttribute ( m_attributeName , getAttributeType ( ) ) ;
valueWidget . setValueEntity ( renderer , value ) ;
|
public class ApiOvhAuth { /** * Request a new credential for your application
* REST : POST / auth / credential
* @ param accessRules [ required ] Access required for your application
* @ param redirection [ required ] Where you want to redirect the user after sucessfull authentication */
public net . minidev . ovh . api . auth . OvhCredential credential_POST ( OvhAccessRule [ ] accessRules , String redirection ) throws IOException { } }
|
String qPath = "/auth/credential" ; StringBuilder sb = path ( qPath ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "accessRules" , accessRules ) ; addBody ( o , "redirection" , redirection ) ; String resp = execN ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , net . minidev . ovh . api . auth . OvhCredential . class ) ;
|
public class ListApi { /** * Get string value by path .
* @ param list subject
* @ param path nodes to walk in map
* @ return value */
public static Optional < String > getString ( final List list , final Integer ... path ) { } }
|
return get ( list , String . class , path ) ;
|
public class PutInstructionFileRequest { /** * Creates and returns a { @ link PutObjectRequest } for the instruction file
* with the specified suffix . */
public PutObjectRequest createPutObjectRequest ( S3Object s3Object ) { } }
|
if ( ! s3Object . getBucketName ( ) . equals ( s3ObjectId . getBucket ( ) ) || ! s3Object . getKey ( ) . equals ( s3ObjectId . getKey ( ) ) ) { throw new IllegalArgumentException ( "s3Object passed inconsistent with the instruction file being created" ) ; } InstructionFileId ifid = s3ObjectId . instructionFileId ( suffix ) ; // ObjectMetadata metadata = s3Object . getObjectMetadata ( ) ;
return new PutObjectRequest ( ifid . getBucket ( ) , ifid . getKey ( ) , redirectLocation ) . withAccessControlList ( accessControlList ) . withCannedAcl ( cannedAcl ) // . withFile ( file )
// . withInputStream ( inputStream )
// don ' t want the metadata for the new instruction file
// . withMetadata ( metadata = = null ? null : metadata . clone ( ) )
. withStorageClass ( storageClass ) . withGeneralProgressListener ( getGeneralProgressListener ( ) ) . withRequestMetricCollector ( getRequestMetricCollector ( ) ) ;
|
public class KeyChecker { /** * Verifies the key usage extension in a CA cert .
* The key usage extension , if present , must assert the keyCertSign bit .
* The extended key usage extension is not checked ( see CR 4776794 for
* more information ) . */
static void verifyCAKeyUsage ( X509Certificate cert ) throws CertPathValidatorException { } }
|
String msg = "CA key usage" ; if ( debug != null ) { debug . println ( "KeyChecker.verifyCAKeyUsage() ---checking " + msg + "..." ) ; } boolean [ ] keyUsageBits = cert . getKeyUsage ( ) ; // getKeyUsage returns null if the KeyUsage extension is not present
// in the certificate - in which case there is nothing to check
if ( keyUsageBits == null ) { return ; } // throw an exception if the keyCertSign bit is not set
if ( ! keyUsageBits [ KEY_CERT_SIGN ] ) { throw new CertPathValidatorException ( msg + " check failed: keyCertSign bit is not set" , null , null , - 1 , PKIXReason . INVALID_KEY_USAGE ) ; } if ( debug != null ) { debug . println ( "KeyChecker.verifyCAKeyUsage() " + msg + " verified." ) ; }
|
public class InterleavedU8 { /** * Returns an integer formed from 4 bands . a [ i ] < < 24 | a [ i + 1 ] < < 16 | a [ i + 2 ] < < 8 | a [ 3]
* @ param x column
* @ param y row
* @ return 32 bit integer */
public int get32 ( int x , int y ) { } }
|
int i = startIndex + y * stride + x * 4 ; return ( ( data [ i ] & 0xFF ) << 24 ) | ( ( data [ i + 1 ] & 0xFF ) << 16 ) | ( ( data [ i + 2 ] & 0xFF ) << 8 ) | ( data [ i + 3 ] & 0xFF ) ;
|
public class MBeanUtil { /** * Create a proxy to the given mbean
* @ param c
* @ param name
* @ return proxy to the mbean
* @ throws Throwable */
@ SuppressWarnings ( "unchecked" ) public static Object getMBean ( final Class c , final String name ) throws Throwable { } }
|
final MBeanServer server = getMbeanServer ( ) ; // return MBeanProxyExt . create ( c , name , server ) ;
return JMX . newMBeanProxy ( server , new ObjectName ( name ) , c ) ;
|
public class XMLChecker {
    /**
     * Determines if the specified character matches the <em>CombiningChar</em> production.
     * See: <a href="http://www.w3.org/TR/REC-xml#NT-CombiningChar">Definition of CombiningChar</a>.
     *
     * @param c the character to check.
     * @return <code>true</code> if the character matches the production, or <code>false</code> if
     *         it does not.
     */
    private static final boolean isCombiningChar(char c) {
        // Widen once to an int so every test below is a plain range comparison.
        int n = c;
        // Ranges transcribed from production [87] of the XML 1.0 specification;
        // comments group them roughly by Unicode block.
        return n >= 0x0300 && n <= 0x0345 || n >= 0x0360 && n <= 0x0361      // combining diacritical marks
                || n >= 0x0483 && n <= 0x0486                                 // Cyrillic
                || n >= 0x0591 && n <= 0x05A1 || n >= 0x05A3 && n <= 0x05B9
                || n >= 0x05BB && n <= 0x05BD || n == 0x05BF
                || n >= 0x05C1 && n <= 0x05C2 || n == 0x05C4                  // Hebrew
                || n >= 0x064B && n <= 0x0652 || n == 0x0670
                || n >= 0x06D6 && n <= 0x06DC || n >= 0x06DD && n <= 0x06DF
                || n >= 0x06E0 && n <= 0x06E4 || n >= 0x06E7 && n <= 0x06E8
                || n >= 0x06EA && n <= 0x06ED                                 // Arabic
                || n >= 0x0901 && n <= 0x0903 || n == 0x093C
                || n >= 0x093E && n <= 0x094C || n == 0x094D
                || n >= 0x0951 && n <= 0x0954 || n >= 0x0962 && n <= 0x0963   // Devanagari
                || n >= 0x0981 && n <= 0x0983 || n == 0x09BC || n == 0x09BE
                || n == 0x09BF || n >= 0x09C0 && n <= 0x09C4
                || n >= 0x09C7 && n <= 0x09C8 || n >= 0x09CB && n <= 0x09CD
                || n == 0x09D7 || n >= 0x09E2 && n <= 0x09E3                  // Bengali
                || n == 0x0A02 || n == 0x0A3C || n == 0x0A3E || n == 0x0A3F
                || n >= 0x0A40 && n <= 0x0A42 || n >= 0x0A47 && n <= 0x0A48
                || n >= 0x0A4B && n <= 0x0A4D || n >= 0x0A70 && n <= 0x0A71   // Gurmukhi
                || n >= 0x0A81 && n <= 0x0A83 || n == 0x0ABC
                || n >= 0x0ABE && n <= 0x0AC5 || n >= 0x0AC7 && n <= 0x0AC9
                || n >= 0x0ACB && n <= 0x0ACD                                 // Gujarati
                || n >= 0x0B01 && n <= 0x0B03 || n == 0x0B3C
                || n >= 0x0B3E && n <= 0x0B43 || n >= 0x0B47 && n <= 0x0B48
                || n >= 0x0B4B && n <= 0x0B4D || n >= 0x0B56 && n <= 0x0B57   // Oriya
                || n >= 0x0B82 && n <= 0x0B83 || n >= 0x0BBE && n <= 0x0BC2
                || n >= 0x0BC6 && n <= 0x0BC8 || n >= 0x0BCA && n <= 0x0BCD
                || n == 0x0BD7                                                // Tamil
                || n >= 0x0C01 && n <= 0x0C03 || n >= 0x0C3E && n <= 0x0C44
                || n >= 0x0C46 && n <= 0x0C48 || n >= 0x0C4A && n <= 0x0C4D
                || n >= 0x0C55 && n <= 0x0C56                                 // Telugu
                || n >= 0x0C82 && n <= 0x0C83 || n >= 0x0CBE && n <= 0x0CC4
                || n >= 0x0CC6 && n <= 0x0CC8 || n >= 0x0CCA && n <= 0x0CCD
                || n >= 0x0CD5 && n <= 0x0CD6                                 // Kannada
                || n >= 0x0D02 && n <= 0x0D03 || n >= 0x0D3E && n <= 0x0D43
                || n >= 0x0D46 && n <= 0x0D48 || n >= 0x0D4A && n <= 0x0D4D
                || n == 0x0D57                                                // Malayalam
                || n == 0x0E31 || n >= 0x0E34 && n <= 0x0E3A
                || n >= 0x0E47 && n <= 0x0E4E                                 // Thai
                || n == 0x0EB1 || n >= 0x0EB4 && n <= 0x0EB9
                || n >= 0x0EBB && n <= 0x0EBC || n >= 0x0EC8 && n <= 0x0ECD   // Lao
                || n >= 0x0F18 && n <= 0x0F19 || n == 0x0F35 || n == 0x0F37
                || n == 0x0F39 || n == 0x0F3E || n == 0x0F3F
                || n >= 0x0F71 && n <= 0x0F84 || n >= 0x0F86 && n <= 0x0F8B
                || n >= 0x0F90 && n <= 0x0F95 || n == 0x0F97
                || n >= 0x0F99 && n <= 0x0FAD || n >= 0x0FB1 && n <= 0x0FB7
                || n == 0x0FB9                                                // Tibetan
                || n >= 0x20D0 && n <= 0x20DC || n == 0x20E1                  // combining marks for symbols
                || n >= 0x302A && n <= 0x302F || n == 0x3099 || n == 0x309A;  // CJK / Kana marks
    }
}
public class UrlUtilities {
    /**
     * Get content from the passed in URL. This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].
     *
     * @param url URL to hit
     * @param inCookies Map of session cookies (or null if not needed)
     * @param outCookies Map of session cookies (or null if not needed)
     * @param allowAllCerts if true, SSL connection will always be trusted.
     * @return byte[] of content fetched from URL, or null if any exception
     *         occurred (the failure is logged at WARN level, not rethrown).
     */
    public static byte[] getContentFromUrl(String url, Map inCookies, Map outCookies, boolean allowAllCerts) {
        try {
            // Delegates to the URL-typed overload; getActualUrl converts the
            // string form into a URL instance.
            return getContentFromUrl(getActualUrl(url), inCookies, outCookies, allowAllCerts);
        } catch (Exception e) {
            // Deliberate best-effort contract: callers receive null instead of
            // an exception on any failure.
            LOG.warn("Exception occurred fetching content from url: " + url, e);
            return null;
        }
    }
}
public class TextRowProtocol {
    /**
     * Get ZonedDateTime format from raw text format.
     *
     * @param columnInfo column information
     * @param clazz class for logging (only used in error messages)
     * @param timeZone time zone applied when the column carries no zone of its own
     * @return ZonedDateTime value, or null for SQL NULL / empty / zero-date values
     * @throws SQLException if column type doesn't permit conversion or the text
     *         cannot be parsed
     */
    public ZonedDateTime getInternalZonedDateTime(ColumnInformation columnInfo, Class clazz, TimeZone timeZone) throws SQLException {
        if (lastValueWasNull()) {
            return null;
        }
        // An empty field is treated as NULL; record that in the null-flag bits
        // so subsequent wasNull()-style checks agree.
        if (length == 0) {
            lastValueNull |= BIT_LAST_FIELD_NULL;
            return null;
        }
        // Raw column bytes are decoded as UTF-8 text before parsing.
        String raw = new String(buf, pos, length, StandardCharsets.UTF_8);
        switch (columnInfo.getColumnType().getSqlType()) {
            case Types.TIMESTAMP:
                // MySQL/MariaDB "zero date" convention maps to NULL.
                if (raw.startsWith("0000-00-00 00:00:00")) {
                    return null;
                }
                try {
                    // Timestamps carry no zone; interpret the local date-time
                    // in the supplied time zone.
                    LocalDateTime localDateTime = LocalDateTime.parse(raw, TEXT_LOCAL_DATE_TIME.withZone(timeZone.toZoneId()));
                    return ZonedDateTime.of(localDateTime, timeZone.toZoneId());
                } catch (DateTimeParseException dateParserEx) {
                    throw new SQLException(raw + " cannot be parse as LocalDateTime. time must have \"yyyy-MM-dd HH:mm:ss[.S]\" format");
                }
            case Types.VARCHAR:
            case Types.LONGVARCHAR:
            case Types.CHAR:
                if (raw.startsWith("0000-00-00 00:00:00")) {
                    return null;
                }
                try {
                    // Textual columns are expected to embed their own offset
                    // and zone, so no external time zone is applied.
                    return ZonedDateTime.parse(raw, TEXT_ZONED_DATE_TIME);
                } catch (DateTimeParseException dateParserEx) {
                    throw new SQLException(raw + " cannot be parse as ZonedDateTime. time must have \"yyyy-MM-dd[T/ ]HH:mm:ss[.S]\" " + "with offset and timezone format (example : '2011-12-03 10:15:30+01:00[Europe/Paris]')");
                }
            default:
                // Any other SQL type cannot be converted to ZonedDateTime.
                throw new SQLException("Cannot read " + clazz.getName() + " using a " + columnInfo.getColumnType().getJavaTypeName() + " field");
        }
    }
}
public class AbsAxis { /** * Adding any AtomicVal to any ItemList staticly .
* @ param pRtx
* as key
* @ param pVal
* to be added
* @ return the index in the ItemList */
public static int addAtomicToItemList ( final INodeReadTrx pRtx , final AtomicValue pVal ) { } }
|
if ( ! atomics . containsKey ( pRtx ) ) { atomics . put ( pRtx , new ItemList ( ) ) ; } return atomics . get ( pRtx ) . addItem ( pVal ) ;
|
public class ProxyInvocationHandlerImpl {
    /**
     * Checks the connector status before a method call is executed. Instantly returns True if the connector already
     * finished successfully, otherwise it will block up to the amount of milliseconds defined by the
     * arbitrationTimeout or until the ProxyInvocationHandler is notified about a successful connection.
     *
     * @return True if the connector was finished successfully in time, False if the connector failed or could not be
     *         finished in time.
     * @throws InterruptedException in case thread is interrupted
     */
    public boolean waitForConnectorFinished() throws InterruptedException {
        connectorStatusLock.lock();
        try {
            // Fast path: connector already completed successfully.
            if (connectorStatus == ConnectorStatus.ConnectorSuccesful) {
                return true;
            }
            // Block until signalled or the discovery timeout elapses; await()
            // returns false when the timeout expires first.
            // NOTE(review): the result of await() is returned without re-checking
            // connectorStatus, so a spurious wakeup (permitted by Condition.await)
            // or a signal fired on a failure path would be reported as success —
            // confirm the signalling discipline of connectorSuccessfullyFinished.
            return connectorSuccessfullyFinished.await(discoveryQos.getDiscoveryTimeoutMs(), TimeUnit.MILLISECONDS);
        } finally {
            connectorStatusLock.unlock();
        }
    }
}
public class FileUtil {
    /**
     * Reads the contents of a file into a byte array.
     *
     * @param file The <code>File</code> to read.
     * @return A byte array containing the file's contents.
     * @throws IOException If an error occurred while reading the file.
     */
    public static byte[] getFileContents(File file) throws IOException {
        // Files.readAllBytes loops until EOF and always closes its stream.
        // The previous implementation issued a single InputStream.read(),
        // which may legally return fewer bytes than the file length, and
        // leaked the stream if the read threw.
        return java.nio.file.Files.readAllBytes(file.toPath());
    }
}
public class HTAccessHandler {
    /**
     * Enforces Apache-style .htaccess access control for the requested path.
     * <p>
     * Walks up the directory tree from the request path looking for an access
     * file (falling back to a configured default), then applies its HTTP-method,
     * IP and Basic-auth rules, answering 401/403/500 as appropriate. A request
     * that passes (or needs no check) is left unhandled for later handlers.
     *
     * @param pathInContext request path relative to the context
     * @param pathParams    path parameters (unused by this handler)
     * @param request       the request being checked
     * @param response      the response used to reject the request
     * @throws HttpException on HTTP-level failures
     * @throws IOException   on I/O failures while sending an error
     */
    public void handle(String pathInContext, String pathParams, HttpRequest request, HttpResponse response) throws HttpException, IOException {
        String user = null;
        String password = null;
        boolean IPValid = true;
        if (log.isDebugEnabled())
            log.debug("HTAccessHandler pathInContext=" + pathInContext);
        // Decode Basic credentials, if present: "Basic <base64(user:password)>".
        String credentials = request.getField(HttpFields.__Authorization);
        if (credentials != null) {
            credentials = credentials.substring(credentials.indexOf(' ') + 1);
            credentials = B64Code.decode(credentials, StringUtil.__ISO_8859_1);
            int i = credentials.indexOf(':');
            user = credentials.substring(0, i);
            password = credentials.substring(i + 1);
            // The password itself is never logged; only a mask of its length.
            if (log.isDebugEnabled())
                log.debug("User=" + user + ", password=" + "******************************".substring(0, password.length()));
        }
        HTAccess ht = null;
        try {
            Resource resource = null;
            String directory = pathInContext.endsWith("/") ? pathInContext : URI.parentPath(pathInContext);
            // Look for htAccess resource: walk parent directories until an
            // access file is found (nearest one wins).
            while (directory != null) {
                String htPath = directory + _accessFile;
                resource = getHttpContext().getResource(htPath);
                if (log.isDebugEnabled())
                    log.debug("directory=" + directory + " resource=" + resource);
                if (resource != null && resource.exists() && !resource.isDirectory())
                    break;
                resource = null;
                directory = URI.parentPath(directory);
            }
            // Try default directory; a missing/invalid default means no check.
            if (resource == null && _default != null) {
                resource = Resource.newResource(_default);
                if (!resource.exists() || resource.isDirectory())
                    return;
            }
            if (resource == null)
                return;
            if (log.isDebugEnabled())
                log.debug("HTACCESS=" + resource);
            // Parsed access files are cached by resource and reparsed when the
            // file's last-modified timestamp changes.
            ht = (HTAccess) _htCache.get(resource);
            if (ht == null || ht.getLastModified() != resource.lastModified()) {
                ht = new HTAccess(resource);
                _htCache.put(resource, ht);
                if (log.isDebugEnabled())
                    log.debug("HTCache loaded " + ht);
            }
            // prevent direct access to the htaccess files themselves
            if (pathInContext.endsWith(_accessFile)) {
                response.sendError(HttpResponse.__403_Forbidden);
                request.setHandled(true);
                return;
            }
            // See if there is a config problem
            if (ht.isForbidden()) {
                log.warn("Mis-configured htaccess: " + ht);
                response.sendError(HttpResponse.__403_Forbidden);
                request.setHandled(true);
                return;
            }
            // first see if we need to handle based on method type
            Map methods = ht.getMethods();
            if (methods.size() > 0 && !methods.containsKey(request.getMethod()))
                return; // Nothing to check
            // Check the access
            int satisfy = ht.getSatisfy();
            // second check IP address
            IPValid = ht.checkAccess("", request.getRemoteAddr());
            if (log.isDebugEnabled())
                log.debug("IPValid = " + IPValid);
            // If IP is correct and satisfy is ANY then access is allowed
            if (IPValid == true && satisfy == HTAccess.ANY)
                return;
            // If IP is NOT correct and satisfy is ALL then access is forbidden
            if (IPValid == false && satisfy == HTAccess.ALL) {
                response.sendError(HttpResponse.__403_Forbidden);
                request.setHandled(true);
                return;
            }
            // Credentials failed (or were absent): challenge with the realm
            // declared in the access file.
            if (!ht.checkAuth(user, password, getHttpContext(), request)) {
                log.debug("Auth Failed");
                response.setField(HttpFields.__WwwAuthenticate, "basic realm=" + ht.getName());
                response.sendError(HttpResponse.__401_Unauthorized);
                response.commit();
                request.setHandled(true);
                return;
            }
            // set authenticated user on the request for downstream handlers
            if (user != null) {
                request.setAuthType(SecurityConstraint.__BASIC_AUTH);
                request.setAuthUser(user);
            }
        } catch (Exception ex) {
            // Any parse/IO failure is a server error, but only when an access
            // file was actually in play (ht != null); otherwise fall through.
            log.warn(LogSupport.EXCEPTION, ex);
            if (ht != null) {
                response.sendError(HttpResponse.__500_Internal_Server_Error);
                request.setHandled(true);
            }
        }
    }
}
public class PrcWageTaxLineSave {
    /**
     * <p>Process entity request.</p>
     * Validates a wage tax line against its owning Wage document, persists it,
     * and hands the refreshed owner back for further processing.
     *
     * @param pAddParam additional param, e.g. return this line's
     *                  document in "nextEntity" for farther process
     * @param pEntity Entity to process
     * @param pRequestData Request Data
     * @return Entity processed for farther process or null
     * @throws Exception - an exception
     */
    @Override
    public final WageTaxLine process(final Map<String, Object> pAddParam, final WageTaxLine pEntity, final IRequestData pRequestData) throws Exception {
        // Basic validation: a tax line must have a non-zero total and a
        // positive percentage.
        if (pEntity.getItsTotal().doubleValue() == 0d) {
            throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER, "total_is_0");
        }
        if (pEntity.getItsPercentage().doubleValue() <= 0d) {
            throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER, "percentage_less_eq_0");
        }
        // Beige-ORM refresh: reload the owning Wage document from storage.
        pEntity.setItsOwner(getSrvOrm().retrieveEntity(pAddParam, pEntity.getItsOwner()));
        // Optimistic locking (dirty check): the client-supplied owner version
        // must match at update time.
        Long ownerVersion = Long.valueOf(pRequestData.getParameter(Wage.class.getSimpleName() + ".ownerVersion"));
        pEntity.getItsOwner().setItsVersion(ownerVersion);
        // Documents already posted to accounting are immutable.
        if (pEntity.getItsOwner().getHasMadeAccEntries()) {
            throw new ExceptionWithCode(ExceptionWithCode.FORBIDDEN, "attempt_to_change_accounted_document");
        }
        // Recompute the expected tax: (ownerTotal - allowance) * percentage / 100
        // rounded per accounting settings, plus the fixed surcharge.
        BigDecimal taxDue = pEntity.getItsOwner().getItsTotal().subtract(pEntity.getAllowance()).multiply(pEntity.getItsPercentage()).divide(BigDecimal.valueOf(100), getSrvAccSettings().lazyGetAccSettings(pAddParam).getPricePrecision(), getSrvAccSettings().lazyGetAccSettings(pAddParam).getRoundingMode()).add(pEntity.getPlusAmount());
        // Allow up to 1 currency unit of rounding drift between the submitted
        // total and the recomputed amount.
        if (Math.abs(taxDue.doubleValue() - pEntity.getItsTotal().doubleValue()) > 1d) {
            throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER, "total_does_not_conform_percentage");
        }
        // Insert or update depending on whether the line already exists.
        if (pEntity.getIsNew()) {
            getSrvOrm().insertEntity(pAddParam, pEntity);
            pEntity.setIsNew(false);
        } else {
            getSrvOrm().updateEntity(pAddParam, pEntity);
        }
        // Propagate totals to the owning document.
        updateOwner(pAddParam, pEntity);
        // Hand the owner back as the next entity in the processing chain.
        pAddParam.put("nextEntity", pEntity.getItsOwner());
        pAddParam.put("nameOwnerEntity", Wage.class.getSimpleName());
        return null;
    }
}
public class SocketRWChannelSelector { /** * @ see
* com . ibm . ws . tcpchannel . internal . ChannelSelector # addWork ( java . lang . Object ) */
@ Override protected void addWork ( Object toAdd ) { } }
|
addToWorkQueue ( toAdd ) ; if ( wakeupNeeded || ( wakeupOption == ValidateUtils . SELECTOR_WAKEUP_IF_NO_FORCE_QUEUE && ( ( TCPBaseRequestContext ) toAdd ) . isForceQueue ( ) == false ) ) { if ( wakeupPending != true ) { wakeupPending = true ; wakeup ( ) ; } }
|
public class RefreshFutures {
    /**
     * Await for either future completion or to reach the timeout. Successful/exceptional
     * future completion is not substantial — only the elapsed time matters.
     *
     * @param timeout  the timeout value.
     * @param timeUnit timeout unit.
     * @param futures  {@link Collection} of {@literal Future}s.
     * @return time awaited in {@link TimeUnit#NANOSECONDS}.
     * @throws InterruptedException if the calling thread is interrupted while waiting
     */
    static long awaitAll(long timeout, TimeUnit timeUnit, Collection<? extends Future<?>> futures) throws InterruptedException {
        final long budget = timeUnit.toNanos(timeout);
        long elapsed = 0;
        for (Future<?> future : futures) {
            long remaining = budget - elapsed;
            // Stop once the overall budget is exhausted.
            if (remaining <= 0) {
                break;
            }
            long startedAt = System.nanoTime();
            try {
                future.get(remaining, TimeUnit.NANOSECONDS);
            } catch (InterruptedException interrupted) {
                // Restore the interrupt flag before propagating.
                Thread.currentThread().interrupt();
                throw interrupted;
            } catch (Exception ignored) {
                // Completion outcome (failure/timeout) is deliberately ignored.
            } finally {
                elapsed += System.nanoTime() - startedAt;
            }
        }
        return elapsed;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.