signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Operators {
    /**
     * Gets a {@link Lexicon} instance with {@link Tokens#reserved(String)} as each operator's
     * value and a lexer that strives to try the shortest operator first.
     *
     * <p>Safely speaking, we could always start from the longest operator and fall back to
     * shorter ones. Yet shorter operators are used more often than longer ones, and scanning
     * them is faster. However, scanning shorter operators first risks that a "==" is mistakenly
     * scanned as "=" followed by another "=". To avoid this, we analyze the prefix relationship
     * and make sure an operator is always tried before any shorter operator that is a prefix of
     * it (i.e. prefixes are scanned after the longer operators they begin).
     *
     * @param operatorNames the operator strings to build the lexicon from
     * @return a Lexicon mapping each operator to its reserved token
     */
    static Lexicon lexicon(final Collection<String> operatorNames) {
        // Operator text -> token value; backs the returned Lexicon's lookup.
        final Map<String, Object> operators = new HashMap<String, Object>();
        // sort() presumably orders names so no operator precedes a longer one
        // it is a prefix of — TODO confirm against its definition elsewhere.
        final String[] ops = sort(operatorNames.toArray(new String[operatorNames.size()]));
        final Parser<?>[] lexers = new Parser<?>[ops.length];
        for (int i = 0; i < ops.length; i++) {
            String s = ops[i];
            // Single-character operators use the cheaper isChar scanner.
            Parser<?> scanner = s.length() == 1 ? Scanners.isChar(s.charAt(0)) : Scanners.string(s);
            Object value = Tokens.reserved(s);
            operators.put(s, value);
            // Each lexer matches the operator text and yields its token value.
            lexers[i] = scanner.retn(value);
        }
        return new Lexicon(operators::get, Parsers.or(lexers));
    }
}
public class DmMessageProcessor { /** * ( non - Javadoc )
* @ see net . roboconf . messaging . api . business . AbstractMessageProcessor
* # processMessage ( net . roboconf . messaging . api . messages . Message ) */
@ Override public void processMessage ( Message message ) { } } | if ( message instanceof MsgNotifMachineDown ) processMsgNotifMachineDown ( ( MsgNotifMachineDown ) message ) ; else if ( message instanceof MsgNotifInstanceChanged ) processMsgNotifInstanceChanged ( ( MsgNotifInstanceChanged ) message ) ; else if ( message instanceof MsgNotifInstanceRemoved ) processMsgNotifInstanceRemoved ( ( MsgNotifInstanceRemoved ) message ) ; else if ( message instanceof MsgNotifHeartbeat ) processMsgNotifHeartbeat ( ( MsgNotifHeartbeat ) message ) ; else if ( message instanceof MsgNotifAutonomic ) processMsgMonitoringEvent ( ( MsgNotifAutonomic ) message ) ; else if ( message instanceof MsgEcho ) this . manager . debugMngr ( ) . notifyMsgEchoReceived ( ( MsgEcho ) message ) ; else if ( message instanceof MsgNotifLogs ) processMsgNotifLogs ( ( MsgNotifLogs ) message ) ; else this . logger . warning ( "The DM got an undetermined message to process: " + message . getClass ( ) . getName ( ) ) ; |
public class Integrator {
    /**
     * Execute point of Integrator.
     *
     * <p>Loads integration properties (from a file when {@code propertiesFile} is set,
     * otherwise from the shared {@code Configuration}), discovers plugin descriptors
     * under the configured plugin directories, then merges and integrates them.
     *
     * @throws Exception if plugin discovery or integration fails; property-file read
     *         failures are rethrown as {@link RuntimeException}
     */
    public void execute() throws Exception {
        // Read the properties file, if it exists; otherwise fall back to the
        // global configuration map.
        properties = new Properties();
        if (propertiesFile != null) {
            FileInputStream propertiesStream = null;
            try {
                propertiesStream = new FileInputStream(propertiesFile);
                properties.load(propertiesStream);
            } catch (final Exception e) {
                throw new RuntimeException(e);
            } finally {
                // Close quietly: a failure to close is logged, not propagated.
                if (propertiesStream != null) {
                    try {
                        propertiesStream.close();
                    } catch (final IOException e) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }
        } else {
            properties.putAll(Configuration.configuration);
        }

        // Backfill defaults for plugin directories and the ignore list.
        if (!properties.containsKey(CONF_PLUGIN_DIRS)) {
            properties.setProperty(CONF_PLUGIN_DIRS, configuration.getOrDefault(CONF_PLUGIN_DIRS, "plugins;demo"));
        }
        if (!properties.containsKey(CONF_PLUGIN_IGNORES)) {
            properties.setProperty(CONF_PLUGIN_IGNORES, configuration.getOrDefault(CONF_PLUGIN_IGNORES, ""));
        }

        // Get the list of plugin directories from the properties.
        final String[] pluginDirs = properties.getProperty(CONF_PLUGIN_DIRS).split(PARAM_VALUE_SEPARATOR);
        final Set<String> pluginIgnores = new HashSet<>();
        if (properties.getProperty(CONF_PLUGIN_IGNORES) != null) {
            pluginIgnores.addAll(Arrays.asList(properties.getProperty(CONF_PLUGIN_IGNORES).split(PARAM_VALUE_SEPARATOR)));
        }

        // Optional explicit plugin ordering: later names in the property get
        // lower priority numbers (the list is reversed before numbering).
        final String pluginOrderProperty = properties.getProperty(CONF_PLUGIN_ORDER);
        if (pluginOrderProperty != null) {
            final List<String> plugins = asList(pluginOrderProperty.trim().split("\\s+"));
            Collections.reverse(plugins);
            int priority = 1;
            for (final String plugin : plugins) {
                pluginOrder.put(plugin, priority++);
            }
        }

        // Deprecated template list support: each non-blank entry is recorded
        // and a deprecation warning is emitted.
        for (final String tmpl : properties.getProperty(CONF_TEMPLATES, "").split(PARAM_VALUE_SEPARATOR)) {
            final String t = tmpl.trim();
            if (t.length() != 0) {
                logger.warn(MessageUtils.getMessage("DOTJ080W", "templates", "template").toString());
                templateSet.put(t, null);
            }
        }

        // Scan every plugin directory (relative paths resolve against ditaDir)
        // for subdirectories containing a plugin.xml descriptor, skipping
        // ignored plugin names.
        for (final String pluginDir2 : pluginDirs) {
            File pluginDir = new File(pluginDir2);
            if (!pluginDir.isAbsolute()) {
                pluginDir = new File(ditaDir, pluginDir.getPath());
            }
            final File[] pluginFiles = pluginDir.listFiles();
            // listFiles() returns null for missing/unreadable dirs; the loop
            // condition guards against that.
            for (int i = 0; (pluginFiles != null) && (i < pluginFiles.length); i++) {
                final File f = pluginFiles[i];
                final File descFile = new File(pluginFiles[i], "plugin.xml");
                if (pluginFiles[i].isDirectory() && !pluginIgnores.contains(f.getName()) && descFile.exists()) {
                    descSet.add(descFile);
                }
            }
        }

        mergePlugins();
        integrate();
        logChanges(pluginList, getPluginIds(pluginsDoc));
    }
}
public class LatLongUtils { /** * Returns a point on the segment nearest to the specified point .
* libGDX ( Apache 2.0) */
public static Point nearestSegmentPoint ( double startX , double startY , double endX , double endY , double pointX , double pointY ) { } } | double xDiff = endX - startX ; double yDiff = endY - startY ; double length2 = xDiff * xDiff + yDiff * yDiff ; if ( length2 == 0 ) return new Point ( startX , startY ) ; double t = ( ( pointX - startX ) * ( endX - startX ) + ( pointY - startY ) * ( endY - startY ) ) / length2 ; if ( t < 0 ) return new Point ( startX , startY ) ; if ( t > 1 ) return new Point ( endX , endY ) ; return new Point ( startX + t * ( endX - startX ) , startY + t * ( endY - startY ) ) ; |
public class Tune { /** * Subfunction of { @ link # transpose ( Tune , int ) } which transpose
* a Chord */
static private void transpose_Chord ( DecorableElement transp , Note noneTranspKeyNote , KeySignature noneTranspKey , Note lastKeyNote , KeySignature lastKey ) { } } | Chord chord = transp . getChord ( ) ; if ( chord != null ) { if ( chord . hasNote ( ) ) chord . setNote ( ( Note ) transpose_Note ( chord . getNote ( ) , noneTranspKeyNote , null , lastKeyNote , null ) ) ; if ( chord . hasBass ( ) ) chord . setBass ( ( Note ) transpose_Note ( chord . getBass ( ) , noneTranspKeyNote , null , lastKeyNote , null ) ) ; } |
public class Setting {
    /**
     * Adds a key-value pair to the given group.
     *
     * @param group the group name
     * @param key the key
     * @param value the value
     * @return the value previously associated with this key in the group, or {@code null} if none
     */
    public String put(String group, String key, String value) {
        return this.groupedMap.put(group, key, value);
    }
}
public class FlexiantComputeClient { /** * Deletes the given server .
* @ param server the server to be deleted .
* @ throws FlexiantException */
public void deleteServer ( final de . uniulm . omi . cloudiator . flexiant . client . domain . Server server ) throws FlexiantException { } } | this . deleteServer ( server . getId ( ) ) ; |
public class ge_sub {
    /**
     * r = p - q
     *
     * <p>Ed25519 group subtraction in extended/cached coordinates, translated from
     * the ref10 "ge_sub" qhasm routine. Compared to ge_add, the roles of
     * q.YplusX/q.YminusX are swapped and Z3/T3 use D-C / D+C respectively.
     *
     * <p>NOTE: the fields of {@code r} are used as scratch registers during the
     * computation (r.X holds YpX1 then ZZ, r.Y holds YmX1 then B, r.Z holds A,
     * r.T holds C), so the statement order below must not be changed.
     */
    public static void ge_sub(ge_p1p1 r, ge_p3 p, ge_cached q) {
        // Temporary field element D = 2*Z1*Z2.
        int[] t0 = new int[10];

        // YpX1 = Y1 + X1
        fe_add.fe_add(r.X, p.Y, p.X);
        // YmX1 = Y1 - X1
        fe_sub.fe_sub(r.Y, p.Y, p.X);
        // A = YpX1 * YmX2   (note: YminusX, the sign flip vs. ge_add)
        fe_mul.fe_mul(r.Z, r.X, q.YminusX);
        // B = YmX1 * YpX2
        fe_mul.fe_mul(r.Y, r.Y, q.YplusX);
        // C = T2d * T1
        fe_mul.fe_mul(r.T, q.T2d, p.T);
        // ZZ = Z1 * Z2   (overwrites the YpX1 scratch in r.X)
        fe_mul.fe_mul(r.X, p.Z, q.Z);
        // D = 2 * ZZ
        fe_add.fe_add(t0, r.X, r.X);
        // X3 = A - B
        fe_sub.fe_sub(r.X, r.Z, r.Y);
        // Y3 = A + B
        fe_add.fe_add(r.Y, r.Z, r.Y);
        // Z3 = D - C
        fe_sub.fe_sub(r.Z, t0, r.T);
        // T3 = D + C
        fe_add.fe_add(r.T, t0, r.T);
    }
}
public class StreamingManager {
    /**
     * Records a VOD file from live-stream data; this overload records the whole
     * stream, from its start time to its end time (delegates with start/end of 0).
     *
     * @param streamKey the stream name
     * @param fileName the file name to save the recording under
     * @return the result of the save-as operation, as returned by the delegate overload
     * @throws QiniuException if the underlying API call fails
     */
    public String saveAs(String streamKey, String fileName) throws QiniuException {
        // 0, 0 means "entire stream" for the (start, end) range.
        return saveAs(streamKey, fileName, 0, 0);
    }
}
public class FileUtil { /** * Given a file inside content it return
* the relative path to get to the root .
* Example : / content and / content / tags / blog will return ' . . / . . '
* @ param sourceFile the file to calculate relative path for
* @ return */
static public String getPathToRoot ( JBakeConfiguration config , File rootPath , File sourceFile ) { } } | Path r = Paths . get ( rootPath . toURI ( ) ) ; Path s = Paths . get ( sourceFile . getParentFile ( ) . toURI ( ) ) ; Path relativePath = s . relativize ( r ) ; StringBuilder sb = new StringBuilder ( ) ; sb . append ( asPath ( relativePath . toString ( ) ) ) ; if ( config . getUriWithoutExtension ( ) ) { sb . append ( "/.." ) ; } if ( sb . length ( ) > 0 ) { // added as calling logic assumes / at end .
sb . append ( "/" ) ; } return sb . toString ( ) ; |
public class Item { /** * Creates the JSON representation of an Item */
@ Override public String toJSONString ( ) { } } | Gson gson = new GsonBuilder ( ) . excludeFieldsWithoutExposeAnnotation ( ) // . registerTypeAdapter ( Date . class , new DateSerializer ( ) )
// . setDateFormat ( " yyyy - MM - dd ' T ' HH : mm : ss . SSS ' Z ' " )
. create ( ) ; return gson . toJson ( this ) ; |
public class UpdateRuleRequest { /** * An array of < code > RuleUpdate < / code > objects that you want to insert into or delete from a < a > Rule < / a > . For more
* information , see the applicable data types :
* < ul >
* < li >
* < a > RuleUpdate < / a > : Contains < code > Action < / code > and < code > Predicate < / code >
* < / li >
* < li >
* < a > Predicate < / a > : Contains < code > DataId < / code > , < code > Negated < / code > , and < code > Type < / code >
* < / li >
* < li >
* < a > FieldToMatch < / a > : Contains < code > Data < / code > and < code > Type < / code >
* < / li >
* < / ul >
* @ param updates
* An array of < code > RuleUpdate < / code > objects that you want to insert into or delete from a < a > Rule < / a > . For
* more information , see the applicable data types : < / p >
* < ul >
* < li >
* < a > RuleUpdate < / a > : Contains < code > Action < / code > and < code > Predicate < / code >
* < / li >
* < li >
* < a > Predicate < / a > : Contains < code > DataId < / code > , < code > Negated < / code > , and < code > Type < / code >
* < / li >
* < li >
* < a > FieldToMatch < / a > : Contains < code > Data < / code > and < code > Type < / code >
* < / li > */
public void setUpdates ( java . util . Collection < RuleUpdate > updates ) { } } | if ( updates == null ) { this . updates = null ; return ; } this . updates = new java . util . ArrayList < RuleUpdate > ( updates ) ; |
public class WebDavServer { /** * Stops the WebDAV server .
* @ throws ServerLifecycleException If the server could not be stopped for any unexpected reason . */
public synchronized void stop ( ) throws ServerLifecycleException { } } | try { server . stop ( ) ; LOG . info ( "WebDavServer stopped." ) ; } catch ( Exception e ) { throw new ServerLifecycleException ( "Server couldn't be stopped" , e ) ; } |
public class Scheme { /** * Splits the given secret into { @ code n } parts , of which any { @ code k } or more can be combined to
* recover the original secret .
* @ param secret the secret to split
* @ return a map of { @ code n } part IDs and their values */
public Map < Integer , byte [ ] > split ( byte [ ] secret ) { } } | // generate part values
final byte [ ] [ ] values = new byte [ n ] [ secret . length ] ; for ( int i = 0 ; i < secret . length ; i ++ ) { // for each byte , generate a random polynomial , p
final byte [ ] p = GF256 . generate ( random , k - 1 , secret [ i ] ) ; for ( int x = 1 ; x <= n ; x ++ ) { // each part ' s byte is p ( partId )
values [ x - 1 ] [ i ] = GF256 . eval ( p , ( byte ) x ) ; } } // return as a set of objects
final Map < Integer , byte [ ] > parts = new HashMap < > ( n ( ) ) ; for ( int i = 0 ; i < values . length ; i ++ ) { parts . put ( i + 1 , values [ i ] ) ; } return Collections . unmodifiableMap ( parts ) ; |
public class MessageProcessor {
    /**
     * Create Connection using the certificate (used for SSL).
     *
     * <p>Authenticates the certificate chain to obtain a security subject, then
     * delegates to the subject-based {@code createConnection} overload.
     *
     * @param certificate the client certificate chain to authenticate with
     * @return Connection
     * @throws SIAuthenticationException if certificate authentication fails
     * @throws SIResourceException
     * @throws SINotAuthorizedException
     */
    private SICoreConnection createConnection(Certificate[] certificate) throws SIAuthenticationException, SIResourceException, SINotAuthorizedException {
        // Standard WAS trace entry/exit pattern, guarded to avoid cost when
        // tracing is disabled.
        if (TraceComponent.isAnyTracingEnabled() && CoreSPIConnFactory.tc.isEntryEnabled()) {
            SibTr.entry(CoreSPIConnFactory.tc, "createConnection", certificate);
        }
        // Check that the Messaging Engine is started.
        checkStarted();
        Subject subject = null;
        try {
            subject = _authentication.login(certificate);
        } catch (MessagingAuthenticationException e) {
            // Translate the internal auth failure into the public SPI exception.
            // NOTE(review): only the message is propagated; the cause is dropped.
            throw new SIAuthenticationException(e.getMessage());
        }
        SICoreConnection connection = createConnection(subject, null);
        if (TraceComponent.isAnyTracingEnabled() && CoreSPIConnFactory.tc.isEntryEnabled())
            SibTr.exit(CoreSPIConnFactory.tc, "createConnection", connection);
        return connection;
    }
}
public class ViewHelper { /** * Equivalent to calling TextView . setText
* @ param cacheView The cache of views to get the view from
* @ param viewId The id of the view whose text should change
* @ param resid The new text for the view */
public static void setText ( EfficientCacheView cacheView , int viewId , @ StringRes int resid ) { } } | View view = cacheView . findViewByIdEfficient ( viewId ) ; if ( view instanceof TextView ) { ( ( TextView ) view ) . setText ( resid ) ; } |
public class ExpandableExtension { /** * select all items
* @ param considerSelectableFlag true if the select method should not select an item if its not selectable */
public void select ( boolean considerSelectableFlag ) { } } | SelectExtension < Item > selectExtension = mFastAdapter . getExtension ( SelectExtension . class ) ; if ( selectExtension == null ) { return ; } for ( Item item : AdapterUtil . getAllItems ( mFastAdapter ) ) { selectExtension . select ( item , considerSelectableFlag ) ; } mFastAdapter . notifyDataSetChanged ( ) ; |
public class LBiLongConsumerBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */
@ Nonnull public static LBiLongConsumer biLongConsumerFrom ( Consumer < LBiLongConsumerBuilder > buildingFunction ) { } } | LBiLongConsumerBuilder builder = new LBiLongConsumerBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ; |
public class CircularBar { /** * Initializes the paints used for the bars */
private void initializePainters ( ) { } } | mClockwiseReachedArcPaint = new Paint ( Paint . ANTI_ALIAS_FLAG ) ; mClockwiseReachedArcPaint . setColor ( mClockwiseArcColor ) ; mClockwiseReachedArcPaint . setAntiAlias ( true ) ; mClockwiseReachedArcPaint . setStrokeWidth ( mClockwiseReachedArcWidth ) ; mClockwiseReachedArcPaint . setStyle ( Paint . Style . STROKE ) ; mCounterClockwiseReachedArcPaint = new Paint ( Paint . ANTI_ALIAS_FLAG ) ; mCounterClockwiseReachedArcPaint . setColor ( mCounterClockwiseArcColor ) ; mCounterClockwiseReachedArcPaint . setAntiAlias ( true ) ; mCounterClockwiseReachedArcPaint . setStrokeWidth ( mCounterClockwiseReachedArcWidth ) ; mCounterClockwiseReachedArcPaint . setStyle ( Paint . Style . STROKE ) ; mClockwiseOutlineArcPaint = new Paint ( Paint . ANTI_ALIAS_FLAG ) ; mClockwiseOutlineArcPaint . setColor ( mClockwiseOutlineArcColor ) ; mClockwiseOutlineArcPaint . setAntiAlias ( true ) ; mClockwiseOutlineArcPaint . setStrokeWidth ( mClockwiseOutlineArcWidth ) ; mClockwiseOutlineArcPaint . setStyle ( Paint . Style . STROKE ) ; mCounterClockwiseOutlineArcPaint = new Paint ( Paint . ANTI_ALIAS_FLAG ) ; mCounterClockwiseOutlineArcPaint . setColor ( mCounterClockwiseOutlineArcColor ) ; mCounterClockwiseOutlineArcPaint . setAntiAlias ( true ) ; mCounterClockwiseOutlineArcPaint . setStrokeWidth ( mCounterClockwiseOutlineArcWidth ) ; mCounterClockwiseOutlineArcPaint . setStyle ( Paint . Style . STROKE ) ; mCircleFillPaint = new Paint ( Paint . ANTI_ALIAS_FLAG ) ; mCircleFillPaint . setColor ( mCircleFillColor ) ; mCircleFillPaint . setAntiAlias ( true ) ; mCircleFillPaint . setStyle ( Paint . Style . FILL ) ; // Defaults
mReachedArcPaint = mClockwiseReachedArcPaint ; mOutlineArcPaint = mClockwiseOutlineArcPaint ; |
public class MeshGenerator {
    /**
     * Builds the mesh information into a {@link com.flowpowered.caustic.api.data.VertexData}
     * which can then be uploaded and rendered. The {@code sizes} parameter controls which
     * information to use and generate; each component is the number of float components per
     * vertex for that attribute.
     *
     * <p>Positions are added if {@code sizes.x} is non-zero and the list isn't null.
     * Normals are added if {@code sizes.y} is non-zero and the list isn't null; if the list is
     * null but positions are available, they are generated from the positions (see
     * {@link #generateNormals(gnu.trove.list.TFloatList, gnu.trove.list.TIntList,
     * gnu.trove.list.TFloatList)}). Texture coordinates are added if {@code sizes.z} is non-zero
     * and the list isn't null. Tangents are generated from all the previous information when it
     * is available and {@code sizes.w} is non-zero (see {@link #generateTangents(
     * gnu.trove.list.TFloatList, gnu.trove.list.TFloatList, gnu.trove.list.TFloatList,
     * gnu.trove.list.TIntList, gnu.trove.list.TFloatList)}). Indices are always added and are
     * required.
     *
     * @param sizes Float components per vertex per attribute: x positions, y normals, z texture coords, w tangents
     * @param positions The list of position data
     * @param normals The list of normal data
     * @param textureCoords The list of texture coordinate data
     * @param indices The list of indices
     * @return The vertex data
     */
    public static VertexData buildMesh(Vector4i sizes, TFloatList positions, TFloatList normals, TFloatList textureCoords, TIntList indices) {
        final VertexData vertexData = new VertexData();
        // Attribute indices are assigned sequentially in the order the
        // attributes are added, so skipped attributes leave no gaps.
        int index = 0;
        // Positions
        if (positions != null && sizes.getX() > 0) {
            final VertexAttribute positionAttribute = new VertexAttribute("positions", DataType.FLOAT, sizes.getX());
            positionAttribute.setData(positions);
            vertexData.addAttribute(index++, positionAttribute);
        }
        // Normals: use the supplied list, or derive them from positions when
        // requested but absent (generated normals are always 3 components).
        if (sizes.getY() > 0) {
            if (normals != null) {
                final VertexAttribute normalAttribute = new VertexAttribute("normals", DataType.FLOAT, sizes.getY());
                normalAttribute.setData(normals);
                vertexData.addAttribute(index++, normalAttribute);
            } else if (positions != null) {
                final VertexAttribute normalAttribute = new VertexAttribute("normals", DataType.FLOAT, 3);
                normals = new TFloatArrayList();
                generateNormals(positions, indices, normals);
                normalAttribute.setData(normals);
                vertexData.addAttribute(index++, normalAttribute);
            }
        }
        // Texture coordinates
        if (textureCoords != null && sizes.getZ() > 0) {
            final VertexAttribute textureCoordAttribute = new VertexAttribute("textureCoords", DataType.FLOAT, sizes.getZ());
            textureCoordAttribute.setData(textureCoords);
            vertexData.addAttribute(index++, textureCoordAttribute);
        }
        // Tangents: require positions, normals (possibly generated above) and
        // texture coordinates; always 4 components.
        if (positions != null && textureCoords != null && normals != null && sizes.getW() > 0) {
            final VertexAttribute tangentAttribute = new VertexAttribute("tangents", DataType.FLOAT, 4);
            tangentAttribute.setData(generateTangents(positions, normals, textureCoords, indices));
            vertexData.addAttribute(index, tangentAttribute);
        }
        // Indices
        vertexData.getIndices().addAll(indices);
        return vertexData;
    }
}
public class TreeMap {
    /**
     * Gets the entry for the least key strictly greater than the specified
     * key; if no such entry exists (the specified key is greater than or
     * equal to the greatest key in the tree, or the tree is empty), returns
     * {@code null}.
     */
    final TreeMapEntry<K, V> getHigherEntry(K key) {
        TreeMapEntry<K, V> p = root;
        while (p != null) {
            int cmp = compare(key, p.key);
            if (cmp < 0) {
                // p.key > key: p is a candidate, but a smaller qualifying key
                // may still exist in the left subtree.
                if (p.left != null)
                    p = p.left;
                else
                    return p;
            } else {
                // p.key <= key: any answer must be in the right subtree.
                if (p.right != null) {
                    p = p.right;
                } else {
                    // No right subtree: climb to the first ancestor reached
                    // from a left child; it holds the least key > key
                    // (null when we fell off the rightmost path).
                    TreeMapEntry<K, V> parent = p.parent;
                    TreeMapEntry<K, V> ch = p;
                    while (parent != null && ch == parent.right) {
                        ch = parent;
                        parent = parent.parent;
                    }
                    return parent;
                }
            }
        }
        return null;
    }
}
public class JaxWsSoapContextHandler {
    /**
     * Captures pertinent information from SOAP messages exchanged by the SOAP
     * service this handler is attached to. Also responsible for placing custom
     * (implicit) SOAP headers on outgoing messages.
     *
     * @see SOAPHandler#handleMessage(MessageContext)
     * @param context the context of the SOAP message passing through this handler
     * @return whether this SOAP interaction should continue (always {@code true})
     */
    @Override
    public boolean handleMessage(SOAPMessageContext context) {
        if ((Boolean) context.get(MessageContext.MESSAGE_OUTBOUND_PROPERTY)) {
            // Outbound message (request), so reset the last request and response builders.
            lastRequestInfo = new RequestInfo.Builder();
            lastResponseInfo = new ResponseInfo.Builder();
            SOAPMessage soapMessage = context.getMessage();
            try {
                SOAPHeader soapHeader = soapMessage.getSOAPHeader();
                // A message with no header block yet needs one created before
                // the custom headers can be attached.
                if (soapHeader == null) {
                    soapHeader = soapMessage.getSOAPPart().getEnvelope().addHeader();
                }
                // Attach every configured implicit header to the request.
                for (SOAPElement header : soapHeaders) {
                    soapHeader.addChildElement(header);
                }
            } catch (SOAPException e) {
                throw new ServiceException("Error setting SOAP headers on outbound message.", e);
            }
            captureServiceAndOperationNames(context);
        }
        // Record the raw XML for both inbound and outbound messages.
        captureSoapXml(context);
        return true;
    }
}
public class DocFile { /** * Reads from an input stream opened from a given file into a given buffer .
* If an IOException occurs , it is wrapped in a DocFileIOException .
* @ param inFile the file for the stream
* @ param input the stream
* @ param buf the buffer
* @ return the number of bytes read , or - 1 if at end of file
* @ throws DocFileIOException if an exception occurred while reading the stream */
private static int read ( DocFile inFile , InputStream input , byte [ ] buf ) throws DocFileIOException { } } | try { return input . read ( buf ) ; } catch ( IOException e ) { throw new DocFileIOException ( inFile , DocFileIOException . Mode . READ , e ) ; } |
public class HelpFormatter { /** * Appends the usage clause for an Option to a StringBuilder .
* @ param sb the StringBuilder to append to
* @ param option the Option to append
* @ param required whether the Option is required or not */
private void appendOption ( StringBuilder sb , Option option , boolean required ) { } } | if ( ! required ) { sb . append ( OPTIONAL_BRACKET_OPEN ) ; } if ( option . getName ( ) != null ) { sb . append ( OPTION_PREFIX ) . append ( option . getName ( ) ) ; } else { sb . append ( LONG_OPTION_PREFIX ) . append ( option . getLongName ( ) ) ; } // if the Option has a value and a non blank arg name
if ( option . hasValue ( ) && ( option . getValueName ( ) == null || ! option . getValueName ( ) . isEmpty ( ) ) ) { sb . append ( option . isWithEqualSign ( ) ? '=' : LONG_OPTION_SEPARATOR ) ; sb . append ( ARG_BRACKET_OPEN ) . append ( option . getValueName ( ) != null ? option . getValueName ( ) : getArgName ( ) ) . append ( ARG_BRACKET_CLOSE ) ; } if ( ! required ) { sb . append ( OPTIONAL_BRACKET_CLOSE ) ; } |
public class DescribeRdsDbInstancesRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeRdsDbInstancesRequest describeRdsDbInstancesRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( describeRdsDbInstancesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeRdsDbInstancesRequest . getStackId ( ) , STACKID_BINDING ) ; protocolMarshaller . marshall ( describeRdsDbInstancesRequest . getRdsDbInstanceArns ( ) , RDSDBINSTANCEARNS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Feature { /** * { @ inheritDoc } */
@ Override public JSONObject toJSON ( ) throws JSONException { } } | JSONObject json = super . toJSON ( ) ; json . put ( JSON_ID , this . mIdentifier ) ; if ( this . mGeometry != null ) { json . put ( JSON_GEOMETRY , this . mGeometry . toJSON ( ) ) ; } else { json . put ( JSON_GEOMETRY , JSONObject . NULL ) ; } if ( this . mProperties != null ) { json . put ( JSON_PROPERTIES , this . mProperties ) ; } else { json . put ( JSON_PROPERTIES , JSONObject . NULL ) ; } return json ; |
public class BackendServiceClient { /** * Creates a BackendService resource in the specified project using the data included in the
* request . There are several restrictions and guidelines to keep in mind when creating a backend
* service . Read Restrictions and Guidelines for more information .
* < p > Sample code :
* < pre > < code >
* try ( BackendServiceClient backendServiceClient = BackendServiceClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* BackendService backendServiceResource = BackendService . newBuilder ( ) . build ( ) ;
* Operation response = backendServiceClient . insertBackendService ( project . toString ( ) , backendServiceResource ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ param backendServiceResource A BackendService resource . This resource defines a group of
* backend virtual machines and their serving capacity . ( = = resource _ for v1 . backendService = = )
* ( = = resource _ for beta . backendService = = )
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation insertBackendService ( String project , BackendService backendServiceResource ) { } } | InsertBackendServiceHttpRequest request = InsertBackendServiceHttpRequest . newBuilder ( ) . setProject ( project ) . setBackendServiceResource ( backendServiceResource ) . build ( ) ; return insertBackendService ( request ) ; |
public class SDVariable { /** * See { @ link # mul ( String , SDVariable ) } */
public SDVariable mul ( SDVariable x ) { } } | return mul ( sameDiff . generateNewVarName ( MulOp . OP_NAME , 0 ) , x ) ; |
public class Element { /** * Sets the URL where more information about this element can be found .
* @ param url the URL as a String
* @ throws IllegalArgumentException if the URL is not a well - formed URL */
public void setUrl ( String url ) { } } | if ( url != null && url . trim ( ) . length ( ) > 0 ) { if ( Url . isUrl ( url ) ) { this . url = url ; } else { throw new IllegalArgumentException ( url + " is not a valid URL." ) ; } } |
public class LogReader { /** * Reports corruption to the monitor .
* The buffer must be updated to remove the dropped bytes prior to invocation . */
private void reportCorruption ( long bytes , String reason ) { } } | if ( monitor != null ) { monitor . corruption ( bytes , reason ) ; } |
public class AbcNode { /** * Return the deepest childs of this node . This is useful to browse easily
* smallest segments of parsed text , and get closer to errors .
* e . g . node A has childs B and C . < br >
* B has 3 childs D , E and F . < br >
* A . getDeepestChilds ( ) returns D , E , F , C in this order .
* If it goes too deep , you can check if node is child of B or C using
* { @ link # isChildOf ( String ) } or { @ link # isChildOf _ or _ is ( String ) } .
* @ return a List of node */
public List getDeepestChilds ( ) { } } | if ( childs . size ( ) == 0 ) { return new ArrayList ( 0 ) ; } List ret = new ArrayList ( childs . size ( ) * 3 ) ; Iterator it = getChilds ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { AbcNode child = ( AbcNode ) it . next ( ) ; if ( ! child . hasChilds ( ) ) ret . add ( child ) ; else { ret . addAll ( child . getDeepestChilds ( ) ) ; } } return ret ; |
public class ReportOpen { /** * Open the given url in default system browser . */
private void openBrowser ( URI url ) throws IOException { } } | if ( Desktop . isDesktopSupported ( ) ) { Desktop . getDesktop ( ) . browse ( url ) ; } else { LOGGER . error ( "Can not open browser because this capability is not supported on " + "your platform. You can use the link below to open the report manually." ) ; } |
public class LifecyclePolicyMarshaller { /** * Marshall the given parameter object . */
public void marshall ( LifecyclePolicy lifecyclePolicy , ProtocolMarshaller protocolMarshaller ) { } } | if ( lifecyclePolicy == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( lifecyclePolicy . getPolicyId ( ) , POLICYID_BINDING ) ; protocolMarshaller . marshall ( lifecyclePolicy . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( lifecyclePolicy . getState ( ) , STATE_BINDING ) ; protocolMarshaller . marshall ( lifecyclePolicy . getExecutionRoleArn ( ) , EXECUTIONROLEARN_BINDING ) ; protocolMarshaller . marshall ( lifecyclePolicy . getDateCreated ( ) , DATECREATED_BINDING ) ; protocolMarshaller . marshall ( lifecyclePolicy . getDateModified ( ) , DATEMODIFIED_BINDING ) ; protocolMarshaller . marshall ( lifecyclePolicy . getPolicyDetails ( ) , POLICYDETAILS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class EglCore { /** * Performs a simple surface query . */
public int querySurface ( EGLSurface eglSurface , int what ) { } } | int [ ] value = new int [ 1 ] ; EGL14 . eglQuerySurface ( mEGLDisplay , eglSurface , what , value , 0 ) ; return value [ 0 ] ; |
public class CassandraCpoAdapter { /** * Retrieves the bean from the datasource . The assumption is that the bean exists in the datasource . If the retrieve
* function defined for this beans returns more than one row , an exception will be thrown .
* @ param name The filter name which tells the datasource which beans should be returned . The name also signifies what
* data in the bean will be populated .
* @ param criteria This is an bean that has been defined within the metadata of the datasource . If the class is not
* defined an exception will be thrown . If the bean does not exist in the datasource , an exception will be thrown .
* This bean is used to specify the arguments used to retrieve the collection of beans .
* @ param result This is an bean that has been defined within the metadata of the datasource . If the class is not
* defined an exception will be thrown . If the bean does not exist in the datasource , an exception will be thrown .
* This bean is used to specify the bean type that will be returned in the collection .
* @ param wheres A collection of CpoWhere beans that define the constraints that should be used when retrieving beans
* @ param orderBy The CpoOrderBy bean that defines the order in which beans should be returned
* @ param nativeExpressions Native expression that will be used to augment the expression stored in the meta data . This
* text will be embedded at run - time
* @ return An bean of the same type as the result argument that is filled in as specified the metadata for the
* retireve .
* @ throws CpoException Thrown if there are errors accessing the datasource */
@ Override public < T , C > T retrieveBean ( String name , C criteria , T result , Collection < CpoWhere > wheres , Collection < CpoOrderBy > orderBy , Collection < CpoNativeFunction > nativeExpressions ) throws CpoException { } } | Iterator < T > it = processSelectGroup ( name , criteria , result , wheres , orderBy , nativeExpressions , true ) . iterator ( ) ; if ( it . hasNext ( ) ) { return it . next ( ) ; } else { return null ; } |
public class XNElement { /** * Save this XML into the given file .
* @ param file the file
* @ throws IOException on error */
public void save ( File file ) throws IOException { } } | try ( FileOutputStream fout = new FileOutputStream ( file ) ) { save ( fout ) ; } |
public class RestClient { /** * Create a URL in a doPriv
* @ param urlString
* @ return
* @ throws MalformedURLException */
private URL createURL ( final String urlString ) throws MalformedURLException { } } | URL url ; try { url = AccessController . doPrivileged ( new PrivilegedExceptionAction < URL > ( ) { @ Override public URL run ( ) throws MalformedURLException { return new URL ( urlString ) ; } } ) ; } catch ( PrivilegedActionException e ) { throw ( MalformedURLException ) e . getCause ( ) ; } return url ; |
public class CmsModuleImportExportRepository {
    /**
     * Checks if a given module needs to be re-exported.<p>
     *
     * A re-export is needed when the export file is missing, is older than the
     * module object, or when the module's content hash has changed since the
     * last export. As a side effect, the hash cache is refreshed whenever a
     * re-export is signalled.
     *
     * @param module the module to check
     * @param moduleFile the file representing the exported module (doesn't necessarily exist)
     * @param project the project in which to check
     * @return true if the module needs to be exported
     */
    private boolean needToExportModule(CmsModule module, File moduleFile, CmsProject project) {
        if (!moduleFile.exists()) {
            LOG.info("Module export file doesn't exist, export is needed.");
            // Prime the hash cache now so the next call can compare against it.
            try {
                String moduleSignature = computeModuleHash(module, project);
                if (moduleSignature != null) {
                    m_moduleHashCache.put(module, moduleSignature);
                }
            } catch (CmsException e) {
                // Hash computation failure only loses the cache priming; the
                // export itself is still required.
                LOG.error(e.getLocalizedMessage(), e);
            }
            return true;
        } else {
            // File exists but is stale relative to the module object itself.
            if (moduleFile.lastModified() < module.getObjectCreateTime()) {
                return true;
            }
            String oldModuleSignature = m_moduleHashCache.get(module);
            String newModuleSignature = null;
            try {
                newModuleSignature = computeModuleHash(module, project);
            } catch (CmsException e) {
                // newModuleSignature stays null, which forces a re-export below.
                LOG.error(e.getLocalizedMessage(), e);
            }
            LOG.info("Comparing module hashes for " + module.getName()
                + " to check if export is needed: old = " + oldModuleSignature + ", new=" + newModuleSignature);
            if ((newModuleSignature == null) || !Objects.equal(oldModuleSignature, newModuleSignature)) {
                // Cache the fresh hash (when available) before signalling the export.
                if (newModuleSignature != null) {
                    m_moduleHashCache.put(module, newModuleSignature);
                }
                // if an error occurs or the module signatures don't match
                return true;
            } else {
                return false;
            }
        }
    }
}
public class TextUtils {
    /**
     * Splits the query parameters of a URI into an ordered, unmodifiable map.
     *
     * Parsing operates on the *encoded* query string and decodes names and
     * values individually, so '&' and '=' inside encoded values are preserved.
     * A parameter without '=' maps to the empty string. Later duplicates of a
     * name overwrite earlier ones.
     *
     * @param rawUri raw uri
     * @return map with params (empty when the URI has no query)
     */
    public static Map<String, String> splitQueryParameters(Uri rawUri) {
        String query = rawUri.getEncodedQuery();
        if (query == null) {
            return Collections.emptyMap();
        }
        // LinkedHashMap keeps the parameters in their original order.
        Map<String, String> paramMap = new LinkedHashMap<>();
        int start = 0;
        do {
            int next = query.indexOf('&', start);
            // end = exclusive end of the current name=value pair.
            int end = (next == -1) ? query.length() : next;
            int separator = query.indexOf('=', start);
            // '=' belonging to a later pair (or missing entirely) means this
            // pair has no value; treat the whole span as the name.
            if (separator > end || separator == -1) {
                separator = end;
            }
            String name = query.substring(start, separator);
            if (!android.text.TextUtils.isEmpty(name)) {
                String value = (separator == end ? "" : query.substring(separator + 1, end));
                paramMap.put(Uri.decode(name), Uri.decode(value));
            }
            // Move start to end of name.
            start = end + 1;
        } while (start < query.length());
        return Collections.unmodifiableMap(paramMap);
    }
}
public class CommercePriceListPersistenceImpl {
    /**
     * Returns all the commerce price lists where groupId = &#63;.
     *
     * @param groupId the group ID
     * @return the matching commerce price lists
     */
    @Override
    public List<CommercePriceList> findByGroupId(long groupId) {
        // Delegate to the ranged overload: ALL_POS/ALL_POS selects every row,
        // and the null comparator means no explicit ordering is applied.
        return findByGroupId(groupId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class CmsListItemWidget { /** * Combines the main icon title with the title for a status icon overlayed over the main icon . < p >
* @ param main the main icon title
* @ param secondary the secondary icon title
* @ return the combined icon title for the secondary icon */
String concatIconTitles ( String main , String secondary ) { } } | if ( main == null ) { main = "" ; } if ( secondary == null ) { secondary = "" ; } if ( secondary . length ( ) == 0 ) { return main ; } return main + " [" + secondary + "]" ; |
public class AppServicePlansInner {
    /**
     * Get a Virtual Network associated with an App Service plan.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param name Name of the App Service plan.
     * @param vnetName Name of the Virtual Network.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the VnetInfoInner object
     */
    public Observable<ServiceResponse<VnetInfoInner>> getVnetFromServerFarmWithServiceResponseAsync(String resourceGroupName, String name, String vnetName) {
        // Validate every required argument up front so failures surface before
        // any network call is scheduled.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (name == null) {
            throw new IllegalArgumentException("Parameter name is required and cannot be null.");
        }
        if (vnetName == null) {
            throw new IllegalArgumentException("Parameter vnetName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the REST call and map the raw HTTP response to a typed
        // ServiceResponse; delegate failures become Observable errors.
        return service.getVnetFromServerFarm(resourceGroupName, name, vnetName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<VnetInfoInner>>>() {
                @Override
                public Observable<ServiceResponse<VnetInfoInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<VnetInfoInner> clientResponse = getVnetFromServerFarmDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class EXIHeaderEncoder {
    /**
     * Writes the EXI header according to the header options, with an optional
     * cookie and optional EXI options document.
     *
     * Wire layout (in order): optional 4-byte cookie "$EXI", 2 distinguishing
     * bits (10), 1 presence bit for the options document, 5 format-version
     * bits, optional options document, and (for non-bit-packed coding modes)
     * padding to a byte boundary.
     *
     * @param headerChannel header channel
     * @param f factory
     * @throws EXIException EXI exception
     */
    public void write(BitEncoderChannel headerChannel, EXIFactory f) throws EXIException {
        try {
            EncodingOptions headerOptions = f.getEncodingOptions();
            CodingMode codingMode = f.getCodingMode();
            // EXI Cookie
            if (headerOptions.isOptionEnabled(EncodingOptions.INCLUDE_COOKIE)) {
                // four byte field consists of four characters "$", "E",
                // "X" and "I" in that order
                headerChannel.encode('$');
                headerChannel.encode('E');
                headerChannel.encode('X');
                headerChannel.encode('I');
            }
            // Distinguishing Bits: the fixed 2-bit pattern "10"
            headerChannel.encodeNBitUnsignedInteger(2, 2);
            // Presence Bit for EXI Options (1 = options document follows)
            boolean includeOptions = headerOptions.isOptionEnabled(EncodingOptions.INCLUDE_OPTIONS);
            headerChannel.encodeBoolean(includeOptions);
            // EXI Format Version "0-0000": preview flag bit ...
            headerChannel.encodeBoolean(false); // preview
            // ... followed by the 4-bit version number (0 = version 1)
            headerChannel.encodeNBitUnsignedInteger(0, 4);
            // EXI Header options and so forth
            if (includeOptions) {
                writeEXIOptions(f, headerChannel);
            }
            // other than bit-packed requires [Padding Bits]: align the stream
            // to a byte boundary before the body starts
            if (codingMode != CodingMode.BIT_PACKED) {
                headerChannel.align();
                headerChannel.flush();
            }
        } catch (IOException e) {
            // Wrap low-level I/O failures in the EXI-specific exception type.
            throw new EXIException(e);
        }
    }
}
public class PubSubOutputHandler {
    /**
     * (non-Javadoc)
     * @see com.ibm.ws.sib.processor.impl.interfaces.ControllableResource#createControlAdapter()
     *
     * Creates (or reuses) the control adapter for this handler's remote topic
     * space. An existing adapter is reused when an AnycastInputHandler already
     * targets the same remote messaging engine; otherwise a new one is created.
     */
    public void createControlAdapter() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createControlAdapter");
        // we have no remote topic space control yet, but the fact
        // is that one might exist due to a remote get being performed
        // from this remote ME
        // We must go through the existing AIHs and see.
        Map aihMap = _destinationHandler.getPseudoDurableAIHMap();
        Iterator aihIterator = aihMap.values().iterator();
        SIBUuid8 psohRemoteMEUuid = getTargetMEUuid();
        while (aihIterator.hasNext()) {
            AnycastInputHandler aih = (AnycastInputHandler) aihIterator.next();
            // get the remote ME uuid for this AIH
            SIBUuid8 aihRemoteMEUuid = aih.getLocalisationUuid();
            // see if it is for the same me as this PSOH
            if (aihRemoteMEUuid.equals(psohRemoteMEUuid)) {
                // the control adapter for the remote TS already exists
                // so we just update it
                AttachedRemoteSubscriberControl attachedRSControl = aih.getControlAdapter();
                // We want to work on the RemoteTopicSpaceControl rather than the AttachedRemoteSubscriberControl
                _controlAdapter = attachedRSControl.getRemoteTopicSpaceControl();
                _controlAdapter.registerControlAdapterAsMBean();
                break;
            }
        }
        if (_controlAdapter == null) {
            // there are no AIHs for getting messages from this remote ME
            // We create a new one
            if (link != null) {
                _controlAdapter = new LinkRemoteTopicSpaceControl(this, _messageProcessor, link.getName());
                // Dont register MBean until a message is sent
            } else {
                _controlAdapter = new RemoteTopicSpaceControl(this, null, _messageProcessor);
                _controlAdapter.registerControlAdapterAsMBean();
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createControlAdapter");
    }
}
public class ExecutablePredicates { /** * Checks if a candidate executable does belong to the specified class .
* @ param reference the class to check against .
* @ return the predicate . */
public static < T extends Executable > Predicate < T > executableBelongsToClass ( Class < ? > reference ) { } } | return candidate -> candidate != null && reference . equals ( candidate . getDeclaringClass ( ) ) ; |
public class FieldElement { /** * Returns the { @ code default } option value or { @ code null } . */
public final OptionElement getDefault ( ) { } } | OptionElement defaultOption = OptionElement . findByName ( options ( ) , "default" ) ; return defaultOption != null ? defaultOption : null ; |
public class CachingMultiIndexReader { /** * { @ inheritDoc } */
public IndexReader [ ] getIndexReaders ( ) { } } | IndexReader [ ] readers = new IndexReader [ subReaders . length ] ; System . arraycopy ( subReaders , 0 , readers , 0 , subReaders . length ) ; return readers ; |
public class Minimizer { /** * Removes a block from the splitter queue . This is done when it is split completely and thus no longer existant . */
private boolean removeFromSplitterQueue ( Block < S , L > block ) { } } | ElementReference ref = block . getSplitterQueueReference ( ) ; if ( ref == null ) { return false ; } splitters . remove ( ref ) ; block . setSplitterQueueReference ( null ) ; return true ; |
public class ClassProjectGridScreen { /** * SetupSFields Method . */
public void setupSFields ( ) { } } | this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . NAME ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . DESCRIPTION ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . PROJECT_PATH ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . THIN_PROJECT_PATH ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . RES_PROJECT_PATH ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . PACKAGE_NAME ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; |
public class ChainedTransformationTools { /** * Call when transforming a new model version delta for an operation . This will copy the { @ link ResourceTransformationContext } instance , using the extra resolver
* to resolve the children of the placeholder resource .
* @ param context the context to copy . It should be at a chained placeholder
* @ param placeholderResolver the extra resolver to use to resolve the placeholder ' s children for the model version delta we are transforming
* @ return a new { @ code ResourceTransformationContext } instance using the extra resolver */
public static ResourceTransformationContext nextInChainOperation ( ResourceTransformationContext context , PlaceholderResolver placeholderResolver ) { } } | assert context instanceof ResourceTransformationContextImpl : "Wrong type of context" ; ResourceTransformationContextImpl ctx = ( ResourceTransformationContextImpl ) context ; ResourceTransformationContext copy = ctx . copy ( placeholderResolver ) ; return copy ; |
public class Wife { /** * Get the person that this object points to . If not found return an unset
* Person .
* @ return the mother . */
public Person getMother ( ) { } } | if ( ! isSet ( ) ) { return new Person ( ) ; } final Person mother = ( Person ) find ( getToString ( ) ) ; if ( mother == null ) { return new Person ( ) ; } else { return mother ; } |
public class Utils { /** * Compare two lists
* @ param list1
* @ param list2
* @ return */
public static < T > boolean compareLists ( List < T > list1 , List < T > list2 ) { } } | if ( list1 == list2 ) return true ; if ( list1 == null || list2 == null ) return false ; if ( list1 . size ( ) != list2 . size ( ) ) return false ; for ( int i = 0 ; i < list1 . size ( ) ; i ++ ) { if ( ! list1 . get ( i ) . equals ( list2 . get ( i ) ) ) { return false ; } } return true ; |
public class DeviceProxy { public void add_logging_target ( String target_type , String target_name ) throws DevFailed { } } | deviceProxyDAO . add_logging_target ( this , target_type + "::" + target_name ) ; |
public class ABITrace { /** * Returns the scaling factor necessary to allow all of the traces to fit vertically
* into the specified space .
* @ param height - required height in pixels
* @ return - scaling factor */
private double calculateScale ( int height ) { } } | double newScale = 0.0 ; double max = ( double ) getMaximum ( ) ; double ht = ( double ) height ; newScale = ( ( ht - 50.0 ) ) / max ; return newScale ; |
public class Texture { /** * Returns an instance that can be used to render a sub - region of this texture . */
public Tile tile ( float x , float y , float width , float height ) { } } | final float tileX = x , tileY = y , tileWidth = width , tileHeight = height ; return new Tile ( ) { @ Override public Texture texture ( ) { return Texture . this ; } @ Override public float width ( ) { return tileWidth ; } @ Override public float height ( ) { return tileHeight ; } @ Override public float sx ( ) { return tileX / displayWidth ; } @ Override public float sy ( ) { return tileY / displayHeight ; } @ Override public float tx ( ) { return ( tileX + tileWidth ) / displayHeight ; } @ Override public float ty ( ) { return ( tileY + tileWidth ) / displayHeight ; } @ Override public void addToBatch ( QuadBatch batch , int tint , AffineTransform tx , float x , float y , float width , float height ) { batch . addQuad ( texture ( ) , tint , tx , x , y , width , height , tileX , tileY , tileWidth , tileHeight ) ; } @ Override public void addToBatch ( QuadBatch batch , int tint , AffineTransform tx , float dx , float dy , float dw , float dh , float sx , float sy , float sw , float sh ) { batch . addQuad ( texture ( ) , tint , tx , dx , dy , dw , dh , tileX + sx , tileY + sy , sw , sh ) ; } } ; |
public class ActionFormMapper { protected Optional < FormYourCollectionResource > findListableYourCollection ( PropertyDesc pd , FormMappingOption option ) { } } | final Class < ? > propertyType = pd . getPropertyType ( ) ; final List < FormYourCollectionResource > yourCollections = option . getYourCollections ( ) ; return yourCollections . stream ( ) // checking defined type and instance type
. filter ( res -> propertyType . equals ( res . getYourType ( ) ) ) // just type in form mapping ( to avoid complexity )
. filter ( res -> res . getYourCollectionCreator ( ) . apply ( Collections . emptyList ( ) ) instanceof List ) . findFirst ( ) ; // basically only - one here ( if no duplicate type specified ) |
public class RunCommandParametersMarshaller { /** * Marshall the given parameter object . */
public void marshall ( RunCommandParameters runCommandParameters , ProtocolMarshaller protocolMarshaller ) { } } | if ( runCommandParameters == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( runCommandParameters . getRunCommandTargets ( ) , RUNCOMMANDTARGETS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DbTableAccess {
    /**
     * Queries the database for a set of rows matching the given selectors.
     * Returns the listed columns if specified, or all readable columns otherwise.
     *
     * The SQL statement is built clause by clause (SELECT, FROM, WHERE,
     * GROUP BY, ORDER BY, LIMIT/OFFSET) and the prepared-statement parameters
     * are then bound in exactly the same clause order — keep the two sections
     * in sync when modifying either.
     *
     * @param recordSelectors List of record selectors (WHERE)
     * @param fieldSelectors List of column selectors (SELECT) or null to select all
     * @param groupBySelectors Field selectors to group records (GROUP BY) or null for not grouping
     * @param orderBySpecifiers Field selectors to order records (ORDER BY) or null for no order
     * @param limit Limit on the number of records returned (LIMIT) or null if not used
     * @param offset Start offset of returned records (OFFSET) or null if not used.
     *        Should be used only with limit and orderBySpecifiers
     * @return a JSON array with one object per matching row
     * @throws Exception if the query privilege is denied, a selector references
     *         an unavailable or unreadable column, or the database fails
     */
    public JSONArray query(List<RecordSelector> recordSelectors, List<FieldSelector> fieldSelectors, List<FieldSelector> groupBySelectors, List<OrderSpecifier> orderBySpecifiers, Integer limit, Integer offset) throws Exception {
        OperationAccess operationAccess = tableSchema.getQueryAccess();
        if (false == operationAccess.isAllowed()) {
            throw new Exception("Attempting to query a table while the privilege is not allowed: " + tableSchema.getLogicalName() + " (" + tableSchema.getPhysicalName() + ")");
        }
        List<FieldSelector> effectiveFieldSelectors = new Vector<FieldSelector>();
        {
            // Create a list of queried fields
            if (null == fieldSelectors) {
                // Select all available columns marked readable
                for (ColumnData columnData : tableSchema.getColumns()) {
                    if (columnData.isReadable()) {
                        effectiveFieldSelectors.add(new FieldSelectorColumn(columnData.getColumnName()));
                    }
                }
            } else {
                // Validate each requested selector against the schema before accepting it
                for (FieldSelector fieldSelector : fieldSelectors) {
                    for (ColumnData columnData : fieldSelector.getColumnData(tableSchema)) {
                        if (null == columnData || false == columnData.isReadable()) {
                            throw new Exception("Invalid selection on " + fieldSelector + " which is not available in table " + tableSchema.getLogicalName() + "(" + tableSchema.getPhysicalName() + ")");
                        }
                    }
                    effectiveFieldSelectors.add(fieldSelector);
                }
            }
            // Sort. This offers greater reusability of the prepared statement.
            Collections.sort(effectiveFieldSelectors, fieldSelectorComparator);
        }
        // GROUP BY fields: validated and sorted the same way as the SELECT fields
        List<FieldSelector> effectiveGroupBySelectors = new Vector<FieldSelector>();
        {
            if (null != groupBySelectors) {
                for (FieldSelector fieldSelector : groupBySelectors) {
                    for (ColumnData columnData : fieldSelector.getColumnData(tableSchema)) {
                        if (null == columnData || false == columnData.isReadable()) {
                            throw new Exception("Invalid GROUP BY on " + fieldSelector + " which is not available in table " + tableSchema.getLogicalName() + "(" + tableSchema.getPhysicalName() + ")");
                        }
                    }
                    effectiveGroupBySelectors.add(fieldSelector);
                }
            }
            Collections.sort(effectiveGroupBySelectors, fieldSelectorComparator);
        }
        // ORDER BY specifiers: validated but NOT sorted — their order is the caller's ordering
        List<OrderSpecifier> effectiveOrderBySelectors = new Vector<OrderSpecifier>();
        {
            if (null != orderBySpecifiers) {
                for (OrderSpecifier orderSpecifier : orderBySpecifiers) {
                    for (ColumnData columnData : orderSpecifier.getColumnData(tableSchema)) {
                        if (null == columnData || false == columnData.isReadable()) {
                            throw new Exception("Invalid ORDER BY on " + orderSpecifier + " which is not available in table " + tableSchema.getLogicalName() + "(" + tableSchema.getPhysicalName() + ")");
                        }
                    }
                    effectiveOrderBySelectors.add(orderSpecifier);
                }
            }
        }
        // Figure out all WHERE clauses (caller selectors plus access-level constraints)
        List<RecordSelector> effectiveRecordSelectors = computeEffectiveWhereClauses(recordSelectors, operationAccess);
        // Create SQL command
        PreparedStatement pstmt = null;
        {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            pw.print("SELECT ");
            {
                boolean first = true;
                for (FieldSelector fieldSelector : effectiveFieldSelectors) {
                    if (first) {
                        first = false;
                    } else {
                        pw.print(",");
                    }
                    pw.print(fieldSelector.getQueryString(tableSchema, SqlElement.Phase.SELECT));
                }
            }
            pw.print(" FROM ");
            pw.print(tableSchema.getPhysicalName());
            {
                // WHERE clauses are ANDed together
                boolean first = true;
                for (RecordSelector exp : effectiveRecordSelectors) {
                    if (first) {
                        pw.print(" WHERE ");
                        first = false;
                    } else {
                        pw.print(" AND ");
                    }
                    pw.print(exp.getQueryString(tableSchema, SqlElement.Phase.WHERE));
                }
            }
            if (effectiveGroupBySelectors.size() > 0) {
                boolean first = true;
                for (FieldSelector groupColumn : effectiveGroupBySelectors) {
                    if (first) {
                        pw.print(" GROUP BY ");
                        first = false;
                    } else {
                        pw.print(",");
                    }
                    pw.print(groupColumn.getQueryString(tableSchema, SqlElement.Phase.GROUP_BY));
                }
            }
            if (effectiveOrderBySelectors.size() > 0) {
                boolean first = true;
                for (OrderSpecifier orderSpecifier : effectiveOrderBySelectors) {
                    if (first) {
                        pw.print(" ORDER BY ");
                        first = false;
                    } else {
                        pw.print(",");
                    }
                    pw.print(orderSpecifier.getQueryString(tableSchema, SqlElement.Phase.ORDER_BY));
                }
            }
            if (null != limit) {
                int limitInt = limit.intValue();
                pw.print(" LIMIT ");
                pw.print(limitInt);
                // OFFSET is only emitted when LIMIT is present
                if (null != offset) {
                    int offsetInt = offset.intValue();
                    pw.print(" OFFSET ");
                    pw.print(offsetInt);
                }
            }
            pw.flush();
            String sqlQuery = sw.toString();
            pstmt = connection.prepareStatement(sqlQuery);
            // Populate prepared statement. Binding order MUST mirror the clause
            // order used above: SELECT fields, WHERE selectors, GROUP BY, ORDER BY.
            int index = 1;
            for (FieldSelector fs : effectiveFieldSelectors) {
                for (TypedValue value : fs.getQueryValues(tableSchema, variables)) {
                    ColumnDataUtils.writeToPreparedStatement(pstmt, index, value);
                    ++index;
                }
            }
            for (RecordSelector exp : effectiveRecordSelectors) {
                for (TypedValue value : exp.getQueryValues(tableSchema, variables)) {
                    ColumnDataUtils.writeToPreparedStatement(pstmt, index, value);
                    ++index;
                }
            }
            for (FieldSelector groupBySelector : effectiveGroupBySelectors) {
                for (TypedValue value : groupBySelector.getQueryValues(tableSchema, variables)) {
                    ColumnDataUtils.writeToPreparedStatement(pstmt, index, value);
                    ++index;
                }
            }
            for (OrderSpecifier orderSpecifier : effectiveOrderBySelectors) {
                for (TypedValue value : orderSpecifier.getQueryValues(tableSchema, variables)) {
                    ColumnDataUtils.writeToPreparedStatement(pstmt, index, value);
                    ++index;
                }
            }
        }
        // Now, we need to retrieve the objects
        JSONArray array = ColumnDataUtils.executeStatementToJson(pstmt);
        return array;
    }
}
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public IfcElectricMotorTypeEnum createIfcElectricMotorTypeEnumFromString ( EDataType eDataType , String initialValue ) { } } | IfcElectricMotorTypeEnum result = IfcElectricMotorTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class ListViolationEventsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListViolationEventsRequest listViolationEventsRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( listViolationEventsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listViolationEventsRequest . getStartTime ( ) , STARTTIME_BINDING ) ; protocolMarshaller . marshall ( listViolationEventsRequest . getEndTime ( ) , ENDTIME_BINDING ) ; protocolMarshaller . marshall ( listViolationEventsRequest . getThingName ( ) , THINGNAME_BINDING ) ; protocolMarshaller . marshall ( listViolationEventsRequest . getSecurityProfileName ( ) , SECURITYPROFILENAME_BINDING ) ; protocolMarshaller . marshall ( listViolationEventsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listViolationEventsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class MapProxy { /** * 创建代理Map < br >
* 此类对Map做一次包装 , 提供各种getXXX方法
* @ param map 被代理的Map
* @ return { @ link MapProxy } */
public static MapProxy create ( Map < ? , ? > map ) { } } | return ( map instanceof MapProxy ) ? ( MapProxy ) map : new MapProxy ( map ) ; |
public class CronExpression { /** * Advance the calendar to the particular hour paying particular attention
* to daylight saving problems .
* @ param cal the calendar to operate on
* @ param hour the hour to set */
protected void setCalendarHour ( Calendar cal , int hour ) { } } | cal . set ( java . util . Calendar . HOUR_OF_DAY , hour ) ; if ( cal . get ( java . util . Calendar . HOUR_OF_DAY ) != hour && hour != 24 ) { cal . set ( java . util . Calendar . HOUR_OF_DAY , hour + 1 ) ; } |
public class Minimizer { /** * Retrieves the local instance of this minimizer .
* The minimizer acts like a singleton of which each thread possesses their own . The minimizer instance returned by
* this method is the one belonging to the calling thread . Therefore , it is not safe to share such an instance
* between two threads .
* @ param < S >
* state class .
* @ param < L >
* transition label class .
* @ return The minimizers local instance . */
@ SuppressWarnings ( "unchecked" ) public static < S , L > Minimizer < S , L > getLocalInstance ( ) { } } | return ( Minimizer < S , L > ) LOCAL_INSTANCE . get ( ) ; |
public class AnsiPrintStream { /** * Process character set sequence .
* @ param options options
* @ return true if the charcter set select command was processed . */
private boolean processCharsetSelect ( ArrayList < Object > options ) { } } | int set = optionInt ( options , 0 ) ; char seq = ( ( Character ) options . get ( 1 ) ) . charValue ( ) ; processCharsetSelect ( set , seq ) ; return true ; |
public class RecurlyClient { /** * Terminate a particular { @ link Subscription } by it ' s UUID
* @ param subscription Subscription to terminate */
public void terminateSubscription ( final Subscription subscription , final RefundOption refund ) { } } | doPUT ( Subscription . SUBSCRIPTION_RESOURCE + "/" + subscription . getUuid ( ) + "/terminate?refund=" + refund , subscription , Subscription . class ) ; |
public class DmnEngineConfigurationBuilder {
    /**
     * Modifies the given DMN engine configuration and returns it: installs the
     * custom post-decision-evaluation listeners, overrides the decision table
     * transform handlers, and supplies the script engine resolver and EL
     * provider when none are configured yet.
     *
     * @return the (mutated) DMN engine configuration
     */
    public DefaultDmnEngineConfiguration build() {
        List<DmnDecisionEvaluationListener> decisionEvaluationListeners = createCustomPostDecisionEvaluationListeners();
        dmnEngineConfiguration.setCustomPostDecisionEvaluationListeners(decisionEvaluationListeners);
        // override the decision table handler
        DmnTransformer dmnTransformer = dmnEngineConfiguration.getTransformer();
        dmnTransformer.getElementTransformHandlerRegistry().addHandler(Definitions.class, new DecisionRequirementsDefinitionTransformHandler());
        dmnTransformer.getElementTransformHandlerRegistry().addHandler(Decision.class, new DecisionDefinitionHandler());
        // do not override the script engine resolver if set
        if (dmnEngineConfiguration.getScriptEngineResolver() == null) {
            ensureNotNull("scriptEngineResolver", scriptEngineResolver);
            dmnEngineConfiguration.setScriptEngineResolver(scriptEngineResolver);
        }
        // do not override the el provider if set
        if (dmnEngineConfiguration.getElProvider() == null) {
            ensureNotNull("expressionManager", expressionManager);
            ProcessEngineElProvider elProvider = new ProcessEngineElProvider(expressionManager);
            dmnEngineConfiguration.setElProvider(elProvider);
        }
        return dmnEngineConfiguration;
    }
}
public class JerseyEmoResource {
    /**
     * Sends the optionally provided request entity using the provided method.
     * The response entity is then deserialized to the provided type.
     *
     * @param method HTTP method name (e.g. "GET", "POST")
     * @param responseType target type; {@code EmoResponse.class} short-circuits
     *                     to a raw-response wrapper without status checking
     * @param entity optional request body, may be null
     * @return the deserialized response entity
     */
    private <T> T send(String method, Class<T> responseType, @Nullable Object entity) {
        // Narrow the response type: callers asking for EmoResponse get the raw
        // wrapped ClientResponse with no status-code validation.
        if (responseType == EmoResponse.class) {
            //noinspection unchecked
            return (T) toEmoResponse(entity == null
                    ? builder().method(method, ClientResponse.class)
                    : builder().method(method, ClientResponse.class, entity));
        }
        try {
            ClientResponse response = entity == null
                    ? builder().method(method, ClientResponse.class)
                    : builder().method(method, ClientResponse.class, entity);
            // This is as per Jersey's WebResource builder() code: >= 300 is an error.
            if (response.getStatus() >= 300) {
                throw new UniformInterfaceException(response);
            }
            // Non-JSON payloads are handed to Jersey's own entity extraction;
            // JSON is decoded by the project's EntityHelper from the raw stream.
            if (!response.getType().equals(MediaType.APPLICATION_JSON_TYPE)) {
                return response.getEntity(responseType);
            }
            return EntityHelper.getEntity(response.getEntity(InputStream.class), responseType);
        } catch (UniformInterfaceException e) {
            // Re-surface Jersey failures as the client's own exception type.
            throw asEmoClientException(e);
        }
    }
}
public class AbstractSource {
    /**
     * (Non Java-doc).
     *
     * Starts this media source: resets transmission counters and media clock,
     * marks the component started/synchronized, starts the attached sink (if
     * any) and submits the worker task to the scheduler. Idempotent: a second
     * call while started is a no-op. On any failure the started flag is rolled
     * back and {@code failed(e)} is signalled.
     *
     * @see org.mobicents.media.MediaSource#start().
     */
    public void start() {
        synchronized (worker) {
            // check scheduler
            try {
                // prevent duplicate starting
                if (started) {
                    return;
                }
                if (scheduler == null) {
                    throw new IllegalArgumentException("Scheduler is not assigned");
                }
                this.txBytes = 0;
                this.txPackets = 0;
                // reset media time and sequence number; initialOffset is
                // consumed exactly once (zeroed after the first start)
                timestamp = this.initialOffset;
                this.initialOffset = 0;
                sn = 0;
                // switch indicator that source has been started
                started = true;
                // just started component always synchronized as well
                this.isSynchronized = true;
                if (mediaSink != null)
                    mediaSink.start();
                // scheduler worker
                worker.reinit();
                scheduler.submit(worker, worker.getQueueNumber());
                // started!
                started();
            } catch (Exception e) {
                // roll back the started flag so a retry is possible
                started = false;
                failed(e);
                logger.error(e);
            }
        }
    }
}
public class Promises {
    /**
     * Delays completion of provided {@code promise} for
     * the defined period of time.
     *
     * @param promise the {@code Promise} to be delayed
     * @param delayMillis delay in millis; values <= 0 return the promise unchanged
     * @return completed {@code Promise}
     */
    @Contract(pure = true)
    @NotNull
    public static <T> Promise<T> delay(@NotNull Promise<T> promise, long delayMillis) {
        if (delayMillis <= 0) return promise;
        // Materialize first so the underlying computation starts immediately;
        // only the delivery of its result is postponed by the event loop timer.
        MaterializedPromise<T> materializedPromise = promise.materialize();
        return Promise.ofCallback(cb ->
                getCurrentEventloop().delay(delayMillis, () -> materializedPromise.whenComplete(cb)));
    }
}
public class ApiOvhDomain {
    /**
     * Post a new corporation trademark information according to Afnic.
     *
     * REST: POST /domain/data/afnicCorporationTrademarkInformation
     *
     * @param inpiNumber [required] Number of the Inpi declaration
     * @param inpiTrademarkOwner [required] Owner of the trademark
     * @param contactId [required] Contact ID related to the Inpi additional information
     * @throws IOException if the HTTP exchange fails
     */
    public OvhAfnicCorporationTrademarkContact data_afnicCorporationTrademarkInformation_POST(
            Long contactId, String inpiNumber, String inpiTrademarkOwner) throws IOException {
        String qPath = "/domain/data/afnicCorporationTrademarkInformation";
        StringBuilder sb = path(qPath);
        // Request body is a flat JSON object of the three parameters.
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "contactId", contactId);
        addBody(o, "inpiNumber", inpiNumber);
        addBody(o, "inpiTrademarkOwner", inpiTrademarkOwner);
        String resp = exec(qPath, "POST", sb.toString(), o);
        return convertTo(resp, OvhAfnicCorporationTrademarkContact.class);
    }
}
public class SubscriptionService { /** * getVersionDetails
* @ param versionURL
* @ param authorizationToken
* @ return VersionDetails */
public VersionDetails getVersionDetails ( String versionURL , String authorizationToken ) { } } | LOG . info ( "getVersionDetails with token: " + authorizationToken ) ; VersionDetailsResponse versionDetailsResponse = ( VersionDetailsResponse ) doRequest ( new HttpGet ( versionURL ) , authorizationToken , VersionDetailsResponse . class ) ; return versionDetailsResponse . data ; |
public class HtmlUtils {
    /**
     * Generate the HTML code for an attribute.
     *
     * Pure delegation: HTML attribute syntax is identical to SGML's here.
     *
     * @param attributeName the name of the attribute.
     * @param value the value of the attribute.
     * @return the HTML attribute.
     */
    public static String generateAttribute(String attributeName, String value) {
        return SgmlUtils.generateAttribute(attributeName, value);
    }
}
public class JavaParsingAtomicArrayQueueGenerator {
    /**
     * Given a variable declaration of some sort, check its name and type and,
     * if it looks like any of the key type changes between unsafe and atomic
     * queues, perform the conversion to change its type.
     *
     * @param node the typed AST node to possibly retype
     * @param name the declared variable/field name used to recognize candidates
     */
    void processSpecialNodeTypes(NodeWithType<?, Type> node, String name) {
        Type type = node.getType();
        if ("buffer".equals(name) && isRefArray(type, "E")) {
            // E[] buffer -> AtomicReferenceArray<E>
            node.setType(atomicRefArrayType((ArrayType) type));
        } else if ("sBuffer".equals(name) && isLongArray(type)) {
            // long[] sBuffer -> AtomicLongArray
            node.setType(atomicLongArrayType());
        } else if (PrimitiveType.longType().equals(type)) {
            // Atomic array accessors take int indices, so narrow the known
            // offset/mask fields from long to int. The case fall-through is
            // intentional: all listed names share the same retyping action.
            switch (name) {
                case "mask":
                case "offset":
                case "seqOffset":
                case "lookAheadSeqOffset":
                case "lookAheadElementOffset":
                    node.setType(PrimitiveType.intType());
            }
        }
    }
}
public class Rythm {
    /**
     * Initialize default engine instance with specified configuration.
     *
     * <p>Note this method cannot be called more than once during a JVM lifecycle:
     * if the default engine instance is created already then an
     * <code>IllegalStateException</code> will be thrown out.</p>
     *
     * <p>When the default engine's {@link RythmEngine#shutdown() shutdown} method gets
     * called the default engine instance will be discarded. Calling any servicing method
     * of <code>Rythm</code> will then cause a new <code>RythmEngine</code> to be
     * initialized as the new default engine.</p>
     *
     * @param conf the configurations
     * @throws IllegalStateException if the default engine already exists
     */
    public static void init(Map<String, ?> conf) {
        if (null != engine) throw new IllegalStateException("Rythm is already initialized");
        engine = new RythmEngine(conf);
        // See #296: register a JVM/GAE shutdown hook that shuts the engine down.
        ShutdownService service = getShutdownService(engine.conf().gae());
        service.setShutdown(new Runnable() {
            @Override
            public void run() {
                if (null != engine) {
                    engine.shutdown();
                }
            }
        });
        // Clear the static reference when the engine shuts down so a later call
        // can re-initialize a fresh default engine.
        engine.setShutdownListener(new RythmEngine.IShutdownListener() {
            @Override
            public void onShutdown() {
                Rythm.engine = null;
            }
        });
    }
}
public class VoltDB { /** * turn off client interface as fast as possible */
private static boolean turnOffClientInterface ( ) { } } | // we don ' t expect this to ever fail , but if it does , skip to dying immediately
VoltDBInterface vdbInstance = instance ( ) ; if ( vdbInstance != null ) { ClientInterface ci = vdbInstance . getClientInterface ( ) ; if ( ci != null ) { if ( ! ci . ceaseAllPublicFacingTrafficImmediately ( ) ) { return false ; } } } return true ; |
public class VpnGatewaysInner {
    /**
     * Updates virtual wan vpn gateway tags.
     *
     * @param resourceGroupName The resource group name of the VpnGateway.
     * @param gatewayName The name of the gateway.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<VpnGatewayInner> updateTagsAsync(String resourceGroupName, String gatewayName,
            final ServiceCallback<VpnGatewayInner> serviceCallback) {
        // Thin async adapter: bridges the observable-based overload to a ServiceFuture.
        return ServiceFuture.fromResponse(
                updateTagsWithServiceResponseAsync(resourceGroupName, gatewayName), serviceCallback);
    }
}
public class AbstractDelegateArbitrateEvent { /** * 判断srcNodes是否在targetNodes中都包含 */
private boolean containAll ( List < Node > srcNodes , List < Node > targetNodes ) { } } | boolean result = true ; for ( Node node : srcNodes ) { result &= targetNodes . contains ( node ) ; } return result ; |
public class FindingSnippets {
    /**
     * Lists all findings for the given source as they existed five days ago.
     *
     * [START list_findings_at_time]
     *
     * @param sourceName the Security Command Center source to query
     * @return an immutable snapshot of every findings result page
     * @throws RuntimeException wrapping the IOException if the client cannot be created
     */
    static ImmutableList<ListFindingsResult> listFindingsAtTime(SourceName sourceName) {
        try (SecurityCenterClient client = SecurityCenterClient.create()) {
            // SourceName sourceName = SourceName.of(/* organizationId= */ "123234324",
            //     /* sourceId= */ "423432321");

            // 5 days ago — the read time pins the query to a historical snapshot.
            Instant fiveDaysAgo = Instant.now().minus(Duration.ofDays(5));
            ListFindingsRequest.Builder request = ListFindingsRequest.newBuilder()
                    .setParent(sourceName.toString())
                    .setReadTime(Timestamp.newBuilder()
                            .setSeconds(fiveDaysAgo.getEpochSecond())
                            .setNanos(fiveDaysAgo.getNano()));
            // Call the API.
            ListFindingsPagedResponse response = client.listFindings(request.build());
            // This creates one list for all findings. If your organization has a large
            // number of findings this can cause out-of-memory issues. You can process
            // them incrementally by returning the Iterable response.iterateAll() directly.
            ImmutableList<ListFindingsResult> results = ImmutableList.copyOf(response.iterateAll());
            System.out.println("Findings:");
            System.out.println(results);
            return results;
        } catch (IOException e) {
            throw new RuntimeException("Couldn't create client.", e);
        }
    }
}
public class Utils {
    /**
     * Confirms a subscription by visiting the provided URL.
     *
     * @param httpClient Client to use to visit URL.
     * @param subscribeUrl Confirmation URL.
     * @return Result of subscription confirmation.
     * @throws SdkClientException wrapping any I/O or unmarshalling failure
     */
    static ConfirmSubscriptionResult confirmSubscription(HttpClient httpClient, String subscribeUrl) {
        try {
            HttpGet request = new HttpGet(subscribeUrl);
            HttpResponse response = httpClient.execute(request);
            if (ApacheUtils.isRequestSuccessful(response)) {
                // Unmarshal the SNS XML response body into the result object.
                return new StaxResponseHandler<ConfirmSubscriptionResult>(
                        ConfirmSubscriptionResultStaxUnmarshaller.getInstance())
                        .handle(ApacheUtils.createResponse(null, request, response, null))
                        .getResult();
            } else {
                throw new HttpException("Could not confirm subscription", response);
            }
        } catch (Exception e) {
            // Surface everything as the SDK's client exception, preserving the cause.
            throw new SdkClientException(e);
        }
    }
}
public class DbxOfficialAppConnector {
    /**
     * Decodes a Google Play Campaign attribution utm_content field that was generated by the
     * Dropbox OpenWith flow. This should only be called if utm_source="dropbox_android_openwith".
     * See https://developers.google.com/analytics/devguides/collection/android/v4/campaign for
     * more information about how to use Play Store attribution.
     *
     * <p>You won't need to use this unless you are our official partner in openwith.</p>
     *
     * @param UtmContent GooglePlay utm content that has been urldecoded
     * @return Intent OpenWith intent that, when launched, will open the file the user requested
     *         to edit. Caller MUST convert the intent into an explicit intent it can handle.
     * @throws DropboxParseException if an Intent cannot be produced from UtmContent
     */
    public static Intent generateOpenWithIntentFromUtmContent(String UtmContent)
            throws DropboxParseException {
        // Utm content is encoded as a base64-encoded marshalled Bundle:
        //   _action  is extracted and becomes the intent's action
        //   _uri     is extracted and becomes the intent's data uri
        //   _type    is extracted and becomes the intent's MIME type
        // All other items in the bundle are transferred to the returned intent's extras.
        byte[] b;
        try {
            b = Base64.decode(UtmContent, 0);
        } catch (IllegalArgumentException ex) {
            throw new DropboxParseException("UtmContent was not base64 encoded: " + ex.getMessage());
        }
        // Unmarshall the raw bytes back into a Bundle via a Parcel.
        final Parcel parcel = Parcel.obtain();
        parcel.unmarshall(b, 0, b.length);
        parcel.setDataPosition(0);
        Bundle bundle = parcel.readBundle();
        parcel.recycle();
        if (bundle == null) {
            throw new DropboxParseException("Could not extract bundle from UtmContent");
        }
        String action = bundle.getString("_action");
        if (action == null) {
            throw new DropboxParseException("_action was not present in bundle");
        }
        bundle.remove("_action");
        Uri uri = bundle.getParcelable("_uri");
        if (uri == null) {
            throw new DropboxParseException("_uri was not present in bundle");
        }
        bundle.remove("_uri");
        String type = bundle.getString("_type");
        if (type == null) {
            throw new DropboxParseException("_type was not present in bundle");
        }
        bundle.remove("_type");
        // The reserved keys are removed above so only genuine extras remain.
        Intent openWithIntent = new Intent(action);
        openWithIntent.setDataAndType(uri, type);
        openWithIntent.putExtras(bundle);
        return openWithIntent;
    }
}
public class AwsSecurityFindingFilters { /** * The parent process ID .
* @ param processParentPid
* The parent process ID . */
public void setProcessParentPid ( java . util . Collection < NumberFilter > processParentPid ) { } } | if ( processParentPid == null ) { this . processParentPid = null ; return ; } this . processParentPid = new java . util . ArrayList < NumberFilter > ( processParentPid ) ; |
public class CmsSitemapController {
    /**
     * Gets the property object which would be inherited by a sitemap entry.<p>
     *
     * Walks up the entry's ancestor chain looking for the first non-empty own
     * property of the given name; if none is found, falls back to the parent
     * properties of the whole sitemap, normalizing the origin path relative to
     * the current site root.
     *
     * @param entry the sitemap entry
     * @param name the name of the property
     * @return the property object which would be inherited, or null if none exists
     */
    public CmsClientProperty getInheritedPropertyObject(CmsClientSitemapEntry entry, String name) {
        CmsClientSitemapEntry currentEntry = entry;
        // Ascend ancestors; note the parent is fetched first, so the entry's own
        // property is deliberately NOT considered — only inherited values are.
        while (currentEntry != null) {
            currentEntry = getParentEntry(currentEntry);
            if (currentEntry != null) {
                CmsClientProperty folderProp = currentEntry.getOwnProperties().get(name);
                if (!CmsClientProperty.isPropertyEmpty(folderProp)) {
                    return folderProp.withOrigin(currentEntry.getSitePath());
                }
            }
        }
        // Nothing found in the visible tree: consult the sitemap's parent properties.
        CmsClientProperty parentProp = getParentProperties().get(name);
        if (!CmsClientProperty.isPropertyEmpty(parentProp)) {
            String origin = parentProp.getOrigin();
            String siteRoot = CmsCoreProvider.get().getSiteRoot();
            // Strip the site root so the origin is a site-relative path.
            if (origin.startsWith(siteRoot)) {
                origin = origin.substring(siteRoot.length());
            }
            return parentProp.withOrigin(origin);
        }
        return null;
    }
}
public class XMLBuilder2 {
    /**
     * Construct a builder for a new XML document with a default namespace.
     * The document will be created with the given root element, and the builder
     * returned by this method will serve as the starting-point for any further
     * document additions.
     *
     * @param name
     *     the name of the document's root element.
     * @param namespaceURI
     *     default namespace URI for document, ignored if null or empty.
     * @param enableExternalEntities
     *     enable external entities; beware of XML External Entity (XXE) injection.
     * @param isNamespaceAware
     *     enable or disable namespace awareness in the underlying
     *     {@link DocumentBuilderFactory}
     * @return
     *     a builder node that can be used to add more nodes to the XML document.
     * @throws XMLBuilderRuntimeException
     *     to wrap {@link ParserConfigurationException}
     */
    public static XMLBuilder2 create(String name, String namespaceURI,
            boolean enableExternalEntities, boolean isNamespaceAware) {
        try {
            return new XMLBuilder2(
                    createDocumentImpl(name, namespaceURI, enableExternalEntities, isNamespaceAware));
        } catch (ParserConfigurationException e) {
            // Builder API is unchecked by design: convert the checked parser
            // configuration failure into the library's runtime exception.
            throw wrapExceptionAsRuntimeException(e);
        }
    }
}
public class WebAppDescriptorImplBase { /** * TODO Add @ Override */
@ Override public List < FILTERTYPE > getAllFilter ( ) { } } | final List < FILTERTYPE > list = new ArrayList < FILTERTYPE > ( ) ; final List < Node > nodeList = model . get ( "filter" ) ; for ( final Node node : nodeList ) { final FILTERTYPE filter = this . createNewFilterViewForModel ( node ) ; list . add ( filter ) ; } return list ; |
public class Ifc4PackageImpl {
    /**
     * Returns the EClass metaobject for IfcWall, lazily resolved from the
     * registered Ifc4 EPackage (classifier index 758).
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcWall() {
        // Lazy lookup: EMF-generated accessor caches the classifier on first use.
        if (ifcWallEClass == null) {
            ifcWallEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(758);
        }
        return ifcWallEClass;
    }
}
public class BigtableTableAdminGrpcClient {
    /**
     * {@inheritDoc}
     *
     * Polls replication state with a fixed 10-second interval (initial == max,
     * so the "exponential" backoff is effectively constant) until the table is
     * replicated or {@code timeout} seconds have elapsed.
     *
     * @param tableName table to wait on
     * @param timeout maximum wait in seconds; timeout * 1000 must fit in an int
     *                (Ints.checkedCast throws otherwise)
     */
    @Override
    public void waitForReplication(BigtableTableName tableName, long timeout)
            throws InterruptedException, TimeoutException {
        // A backoff that randomizes with an interval of 10s.
        ExponentialBackOff backOff = new ExponentialBackOff.Builder()
                .setInitialIntervalMillis(10 * 1000)
                .setMaxIntervalMillis(10 * 1000)
                .setMaxElapsedTimeMillis(Ints.checkedCast(timeout * 1000))
                .build();
        waitForReplication(tableName, backOff);
    }
}
public class ConvolutionLayer {
    /**
     * preOutput4d: Used so that ConvolutionLayer subclasses (such as Convolution1DLayer)
     * can maintain their standard non-4d preOutput method, while overriding this to
     * return 4d activations (for use in backprop) without modifying the public API.
     *
     * Base implementation: plain delegation, since this layer's preOutput is already 4d.
     */
    protected Pair<INDArray, INDArray> preOutput4d(boolean training, boolean forBackprop,
            LayerWorkspaceMgr workspaceMgr) {
        return preOutput(training, forBackprop, workspaceMgr);
    }
}
public class SnapshotsInner {
    /**
     * Creates or updates a snapshot.
     *
     * @param resourceGroupName The name of the resource group.
     * @param snapshotName The name of the snapshot that is being created. The name can't be
     *        changed after the snapshot is created. Supported characters for the name are
     *        a-z, A-Z, 0-9 and _. The max name length is 80 characters.
     * @param snapshot Snapshot object supplied in the body of the Put disk operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<SnapshotInner> createOrUpdateAsync(String resourceGroupName,
            String snapshotName, SnapshotInner snapshot,
            final ServiceCallback<SnapshotInner> serviceCallback) {
        // Thin async adapter around the observable-based overload.
        return ServiceFuture.fromResponse(
                createOrUpdateWithServiceResponseAsync(resourceGroupName, snapshotName, snapshot),
                serviceCallback);
    }
}
public class FaceList { /** * Adds a vertex to the end of this list . */
public void add ( Face vtx ) { } } | if ( head == null ) { head = vtx ; } else { tail . next = vtx ; } vtx . next = null ; tail = vtx ; |
public class Utils {
    /**
     * Gets a stream of values from a collection of range resources.
     *
     * Keeps only resources carrying ranges, flattens each resource's range
     * list, then expands every range into its individual long values via the
     * single-resource {@code rangeValues} overload.
     *
     * @param resources Mesos resources to scan; must be non-null
     * @return a LongStream of every value covered by every range
     */
    public static LongStream rangeValues(Collection<Protos.Resource> resources) {
        checkNotNull(resources);
        return resources.stream()
                .filter(Protos.Resource::hasRanges)
                .flatMap(r -> r.getRanges().getRangeList().stream())
                .flatMapToLong(Utils::rangeValues);
    }
}
public class HttpSession { /** * Sets a particular value for a session token . If the value is null , that token is deleted from
* the session .
* @ param tokenName the token name
* @ param value the new value of the token , or null , if the token has to be deleted */
public void setTokenValue ( String tokenName , Cookie value ) { } } | if ( value == null ) { tokenValues . remove ( tokenName ) ; } else { tokenValues . put ( tokenName , value ) ; } |
public class BackendServiceClient {
    /**
     * Deletes the specified BackendService resource.
     *
     * <p>Sample code:
     * <pre><code>
     * try (BackendServiceClient backendServiceClient = BackendServiceClient.create()) {
     *   ProjectGlobalBackendServiceName backendService =
     *       ProjectGlobalBackendServiceName.of("[PROJECT]", "[BACKEND_SERVICE]");
     *   Operation response = backendServiceClient.deleteBackendService(backendService);
     * }
     * </code></pre>
     *
     * @param backendService Name of the BackendService resource to delete.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Operation deleteBackendService(ProjectGlobalBackendServiceName backendService) {
        // Build the request object, tolerating a null name, and delegate to the
        // request-based overload.
        DeleteBackendServiceHttpRequest request = DeleteBackendServiceHttpRequest.newBuilder()
                .setBackendService(backendService == null ? null : backendService.toString())
                .build();
        return deleteBackendService(request);
    }
}
public class JPasswordObfuscator { /** * Obfuscate data using supplied master key and algorithm version .
* @ param masterKey master key to use for obfuscation
* @ param data data to obfuscate
* @ param version obfuscation algorithm version to use
* @ return string containing obfuscated data ; use { @ link # unObfuscate } to get secret data from this string */
public String obfuscate ( char [ ] masterKey , byte [ ] data , int version ) { } } | Objects . requireNonNull ( masterKey ) ; Objects . requireNonNull ( data ) ; switch ( version ) { case 1 : return v1Obfuscator . obfuscate ( masterKey , data ) . toString ( ) ; default : throw new IllegalArgumentException ( "Unsupported version: " + version ) ; } |
public class CmsSearchFieldConfiguration {
    /**
     * Creates the Lucene Document with this field configuration for the provided VFS
     * resource, search index and content.<p>
     *
     * This triggers the indexing process for the given VFS resource according to the
     * configuration of the provided index.<p>
     *
     * The provided index resource contains the basic contents to index. The provided
     * search index contains the configuration of what to index, such as the locale and
     * possible special field mappings.<p>
     *
     * @param cms the OpenCms user context used to access the OpenCms VFS
     * @param resource the resource to create the Lucene document from
     * @param index the search index to create the Document for
     * @param extraction the plain text content extracted from the document
     * @return the Search Document for the given VFS resource and the given search index
     * @throws CmsException if something goes wrong
     */
    public I_CmsSearchDocument createDocument(CmsObject cms, CmsResource resource,
            I_CmsSearchIndex index, I_CmsExtractionResult extraction) throws CmsException {
        // NOTE(review): stores the index in an instance field — this method is
        // presumably not safe for concurrent use on one configuration instance.
        m_index = (CmsSearchIndex) index;
        I_CmsSearchDocument document = m_index.createEmptyDocument(resource);
        // Searched properties include inherited values (true); plain properties do not.
        List<CmsProperty> propertiesSearched = cms.readPropertyObjects(resource, true);
        List<CmsProperty> properties = cms.readPropertyObjects(resource, false);
        // Each append step may replace the document, so the result is threaded through.
        document = appendContentBlob(document, cms, resource, extraction, properties, propertiesSearched);
        document = appendPath(document, cms, resource, extraction, properties, propertiesSearched);
        document = appendType(document, cms, resource, extraction, properties, propertiesSearched);
        document = appendFileSize(document, cms, resource, extraction, properties, propertiesSearched);
        document = appendDates(document, cms, resource, extraction, properties, propertiesSearched);
        document = appendLocales(document, cms, resource, extraction, properties, propertiesSearched);
        document = appendProperties(document, cms, resource, extraction, properties, propertiesSearched);
        document = appendCategories(document, cms, resource, extraction, properties, propertiesSearched);
        document = appendFieldMappings(document, cms, resource, extraction, properties, propertiesSearched);
        // "Dcoument" is a typo in the project-declared method name; it cannot be fixed here.
        document = appendAdditionalValuesToDcoument(document, cms, resource, extraction, properties, propertiesSearched);
        return document;
    }
}
public class DescribeEntityAggregatesResult {
    /**
     * The number of entities that are affected by each of the specified events.
     *
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setEntityAggregates(java.util.Collection)} or
     * {@link #withEntityAggregates(java.util.Collection)} if you want to override the
     * existing values.
     *
     * @param entityAggregates
     *        The number of entities that are affected by each of the specified events.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeEntityAggregatesResult withEntityAggregates(EntityAggregate... entityAggregates) {
        // Lazily create the backing list, presized to the varargs length.
        if (this.entityAggregates == null) {
            setEntityAggregates(new java.util.ArrayList<EntityAggregate>(entityAggregates.length));
        }
        for (EntityAggregate ele : entityAggregates) {
            this.entityAggregates.add(ele);
        }
        return this;
    }
}
public class Parser {
    /**
     * 12.9 The return Statement.
     *
     * Consumes {@code return [expression] ;} where both the expression and the
     * terminating semicolon are optional under ASI (automatic semicolon insertion).
     *
     * @return the parsed ReturnStatementTree (expression may be null for bare returns)
     */
    private ParseTree parseReturnStatement() {
        SourcePosition start = getTreeStartLocation();
        eat(TokenType.RETURN);
        ParseTree expression = null;
        // An implicit semicolon (newline/EOF/'}') right after 'return' means a
        // bare return — do not parse an expression across the ASI boundary.
        if (!peekImplicitSemiColon()) {
            expression = parseExpression();
        }
        eatPossibleImplicitSemiColon();
        return new ReturnStatementTree(getTreeLocation(start), expression);
    }
}
public class DateTimePickerBase {
    /**
     * {@inheritDoc}
     *
     * Also snaps the currently selected view to the new minimum so the picker
     * never displays a view finer than the minimum allowed.
     */
    @Override
    public void setMinView(final DateTimePickerView dateTimePickerView) {
        this.minView = dateTimePickerView;
        // We keep the selected view the same as the min view.
        if (viewSelect != minView) {
            setViewSelect(dateTimePickerView);
        }
    }
}
public class XADataLogger {
    /**
     * Prepares the logger for writing. The current content is removed.
     *
     * Reopens the underlying data log in WRITE mode (truncating it) and writes
     * the start-sequence record tagged with the given recorder id.
     *
     * @param xaDataRecorderId id stamped into the start-sequence record
     * @throws IOException if reopening or writing the log fails
     * @throws InterruptedException if the thread is interrupted while waiting for the log
     */
    void prepareForWrite(long xaDataRecorderId) throws IOException, InterruptedException {
        this.dataLogger.reopen(AccessMode.WRITE);
        this.writeStartSequence(xaDataRecorderId);
    }
}
public class BPMExchangeHandler {
    /**
     * {@inheritDoc}
     *
     * Deregisters the runtime manager from the registry, then closes it; the
     * finally block guarantees the reference is cleared and the superclass stop
     * runs even if close() throws.
     */
    @Override
    protected void doStop() {
        KnowledgeRuntimeManagerRegistry.removeRuntimeManager(
                getServiceDomain().getName(), getServiceName());
        try {
            _runtimeManager.close();
        } finally {
            _runtimeManager = null;
            super.doStop();
        }
    }
}
public class DefaultURLTemplatesFactory {
    /**
     * Returns the URL template name of the given type (by key) from the
     * desired reference group.
     *
     * @param refGroupName name of a group of templates from the config file.
     * @param key type of the template
     * @return the template name, or null if the config was never loaded or no
     *         template (including the secure-default fallback) matches
     */
    public String getTemplateNameByRef(String refGroupName, String key) {
        // Assertion fires in dev builds; the null check below is the deliberate
        // production-safe fallback for an unloaded config.
        assert _urlTemplates != null : "The template config file has not been loaded.";
        if (_urlTemplates == null) {
            return null;
        }
        String ref = _urlTemplates.getTemplateNameByRef(refGroupName, key);
        if (ref == null) {
            // If the template is a secure template, look for the secure default
            // before resolving to the default.
            if (key.equals(URLTemplatesFactory.SECURE_RENDER_TEMPLATE)
                    || key.equals(URLTemplatesFactory.SECURE_ACTION_TEMPLATE)
                    || key.equals(URLTemplatesFactory.SECURE_RESOURCE_TEMPLATE)) {
                ref = _urlTemplates.getTemplateNameByRef(refGroupName,
                        URLTemplatesFactory.SECURE_DEFAULT_TEMPLATE);
            }
        }
        return ref;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.