signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ColumnNameHelper { /** * return the max column * note that comparator should not be of CompositeType ! * @ param b1 lhs * @ param b2 rhs * @ param comparator the comparator to use * @ return the biggest column according to comparator */ private static ByteBuffer max ( ByteBuffer b1 , ByteBuffer b2 , AbstractType < ? > comparator ) { } }
if ( b1 == null ) return b2 ; if ( b2 == null ) return b1 ; if ( comparator . compare ( b1 , b2 ) >= 0 ) return b1 ; return b2 ;
public class WstxEventReader { /** * Method used to locate error message description to use . * Calls sub - classes < code > getErrorDesc ( ) < / code > first , and only * if no message found , uses default messages defined here . */ protected final String findErrorDesc ( int errorType , int currEvent ) { } }
String msg = getErrorDesc ( errorType , currEvent ) ; if ( msg != null ) { return msg ; } switch ( errorType ) { case ERR_GETELEMTEXT_NOT_START_ELEM : return "Current state not START_ELEMENT when calling getElementText()" ; case ERR_GETELEMTEXT_NON_TEXT_EVENT : return "Expected a text token" ; case ERR_NEXTTAG_NON_WS_TEXT : return "Only all-whitespace CHARACTERS/CDATA (or SPACE) allowed for nextTag()" ; case ERR_NEXTTAG_WRONG_TYPE : return "Should only encounter START_ELEMENT/END_ELEMENT, SPACE, or all-white-space CHARACTERS" ; } // should never happen , but it ' d be bad to throw another exception . . . return "Internal error (unrecognized error type: " + errorType + ")" ;
public class TableCellWithActionExample { /** * Parses a date string . * @ param dateString the date string to parse * @ return a date corresponding to the given dateString , or null on error . */ private static Date parse ( final String dateString ) { } }
try { return new SimpleDateFormat ( "dd/mm/yyyy" ) . parse ( dateString ) ; } catch ( ParseException e ) { LOG . error ( "Error parsing date: " + dateString , e ) ; return null ; }
public class MarkLogicClientImpl { /** * executes BooleanQuery * @ param queryString * @ param bindings * @ param tx * @ param includeInferred * @ param baseURI * @ return */ public boolean performBooleanQuery ( String queryString , SPARQLQueryBindingSet bindings , Transaction tx , boolean includeInferred , String baseURI ) { } }
SPARQLQueryDefinition qdef = sparqlManager . newQueryDefinition ( queryString ) ; if ( notNull ( baseURI ) && ! baseURI . isEmpty ( ) ) { qdef . setBaseUri ( baseURI ) ; } qdef . setIncludeDefaultRulesets ( includeInferred ) ; if ( notNull ( ruleset ) ) { qdef . setRulesets ( ruleset ) ; } if ( notNull ( getConstrainingQueryDefinition ( ) ) ) { qdef . setConstrainingQueryDefinition ( getConstrainingQueryDefinition ( ) ) ; qdef . setDirectory ( getConstrainingQueryDefinition ( ) . getDirectory ( ) ) ; qdef . setCollections ( getConstrainingQueryDefinition ( ) . getCollections ( ) ) ; qdef . setResponseTransform ( getConstrainingQueryDefinition ( ) . getResponseTransform ( ) ) ; qdef . setOptionsName ( getConstrainingQueryDefinition ( ) . getOptionsName ( ) ) ; } if ( notNull ( graphPerms ) ) { qdef . setUpdatePermissions ( graphPerms ) ; } return sparqlManager . executeAsk ( qdef , tx ) ;
public class SelectList { /** * Select all options that display text matching the argument . * @ param label * the label to select */ public void selectByLabel ( String label ) { } }
getDispatcher ( ) . beforeSelect ( this , label ) ; new Select ( getElement ( ) ) . selectByVisibleText ( label ) ; if ( Config . getBoolConfigProperty ( ConfigProperty . ENABLE_GUI_LOGGING ) ) { logUIActions ( UIActions . SELECTED , label ) ; } getDispatcher ( ) . afterSelect ( this , label ) ;
public class GridCellElement { /** * Add a reference from this element to a cell that is containing this element . * @ param cell the grid cell . * @ return < code > true < / code > if the added cell is the reference ; * < code > false < / code > if the added cell is not the reference . */ public boolean addCellLink ( GridCell < P > cell ) { } }
if ( this . cells . add ( cell ) ) { return isReferenceCell ( cell ) ; } return false ;
public class GosuStringUtil { /** * < p > Returns padding using the specified delimiter repeated * to a given length . < / p > * < pre > * GosuStringUtil . padding ( 0 , ' e ' ) = " " * GosuStringUtil . padding ( 3 , ' e ' ) = " eee " * GosuStringUtil . padding ( - 2 , ' e ' ) = IndexOutOfBoundsException * < / pre > * < p > Note : this method doesn ' t not support padding with * < a href = " http : / / www . unicode . org / glossary / # supplementary _ character " > Unicode Supplementary Characters < / a > * as they require a pair of < code > char < / code > s to be represented . * If you are needing to support full I18N of your applications * consider using { @ link # repeat ( String , int ) } instead . * @ param repeat number of times to repeat delim * @ param padChar character to repeat * @ return String with repeated character * @ throws IndexOutOfBoundsException if < code > repeat & lt ; 0 < / code > * @ see # repeat ( String , int ) */ private static String padding ( int repeat , char padChar ) throws IndexOutOfBoundsException { } }
if ( repeat < 0 ) { throw new IndexOutOfBoundsException ( "Cannot pad a negative amount: " + repeat ) ; } final char [ ] buf = new char [ repeat ] ; for ( int i = 0 ; i < buf . length ; i ++ ) { buf [ i ] = padChar ; } return new String ( buf ) ;
public class DocumentRevsUtils { /** * Create the list of the revision IDs in ascending order . * The DocumentRevs is for a single tree . There should be one DocumentRevs for each open revision . * @ param documentRevs a deserialised JSON document including the _ revisions structure . See * < a target = " _ blank " href = " http : / / docs . couchdb . org / en / latest / api / document / common . html # getting - a - list - of - revisions " > * Getting a List of Revisions < / a > for more information . * @ return list of revision IDs in ascending order */ public static List < String > createRevisionIdHistory ( DocumentRevs documentRevs ) { } }
validateDocumentRevs ( documentRevs ) ; String latestRevision = documentRevs . getRev ( ) ; int generation = CouchUtils . generationFromRevId ( latestRevision ) ; assert generation == documentRevs . getRevisions ( ) . getStart ( ) ; List < String > revisionHistory = CouchUtils . couchStyleRevisionHistoryToFullRevisionIDs ( generation , documentRevs . getRevisions ( ) . getIds ( ) ) ; logger . log ( Level . FINER , "Revisions history: " + revisionHistory ) ; return revisionHistory ;
public class EvolutionResult { /** * Return a collector which collects the best result of an evolution stream . * < pre > { @ code * final Problem < ISeq < Point > , EnumGene < Point > , Double > tsm = . . . ; * final EvolutionResult < EnumGene < Point > , Double > result = Engine . builder ( tsm ) * . optimize ( Optimize . MINIMUM ) . build ( ) * . stream ( ) * . limit ( 100) * . collect ( EvolutionResult . toBestEvolutionResult ( ) ) ; * } < / pre > * If the collected { @ link EvolutionStream } is empty , the collector returns * < b > { @ code null } < / b > . * @ param < G > the gene type * @ param < C > the fitness type * @ return a collector which collects the best result of an evolution stream */ public static < G extends Gene < ? , G > , C extends Comparable < ? super C > > Collector < EvolutionResult < G , C > , ? , EvolutionResult < G , C > > toBestEvolutionResult ( ) { } }
return Collector . of ( MinMax :: < EvolutionResult < G , C > > of , MinMax :: accept , MinMax :: combine , mm -> mm . getMax ( ) != null ? mm . getMax ( ) . withTotalGenerations ( mm . getCount ( ) ) : null ) ;
public class IPv6Network { /** * Create an IPv6 network from the two addresses within the network . This will construct the smallest possible network ( " longest prefix * length " ) which contains both addresses . * @ param one address one * @ param two address two , should be bigger than address one * @ return ipv6 network */ public static IPv6Network fromTwoAddresses ( IPv6Address one , IPv6Address two ) { } }
final IPv6NetworkMask longestPrefixLength = IPv6NetworkMask . fromPrefixLength ( IPv6NetworkHelpers . longestPrefixLength ( one , two ) ) ; return new IPv6Network ( one . maskWithNetworkMask ( longestPrefixLength ) , longestPrefixLength ) ;
public class ParserDQL { /** * @ param exprList * @ param parseList * @ param start * @ param count * @ param isOption * @ param preferToThrow - if false , exceptions are quietly passed up the stack rather than thrown . * making it possible to distinguish ( breakpoint at ) serious exceptions * vs . false alarms that would merely indicate that the parser wandered * down an unfruitful path and needs to backtrack . * Exceptions should not be used for normal control flow . * @ return a non - thrown HsqlException that can be thrown later if / when the alternatives have run out . */ private HsqlException readExpression ( HsqlArrayList exprList , short [ ] parseList , int start , int count , boolean isOption , boolean preferToThrow ) { } }
/* disable 2 lines . . . void readExpression ( HsqlArrayList exprList , short [ ] parseList , int start , int count , boolean isOption ) { . . . disabled 2 lines */ // End of VoltDB extension for ( int i = start ; i < start + count ; i ++ ) { int exprType = parseList [ i ] ; switch ( exprType ) { case Tokens . QUESTION : { Expression e = null ; e = XreadAllTypesCommonValueExpression ( false ) ; exprList . add ( e ) ; continue ; } case Tokens . X_POS_INTEGER : { Expression e = null ; int integer = readInteger ( ) ; if ( integer < 0 ) { throw Error . error ( ErrorCode . X_42592 ) ; } e = new ExpressionValue ( ValuePool . getInt ( integer ) , Type . SQL_INTEGER ) ; exprList . add ( e ) ; continue ; } case Tokens . X_OPTION : { i ++ ; int expressionCount = exprList . size ( ) ; int position = getPosition ( ) ; int elementCount = parseList [ i ++ ] ; int initialExprIndex = exprList . size ( ) ; // A VoltDB extension to avoid using exceptions for flow control . HsqlException ex = null ; try { ex = readExpression ( exprList , parseList , i , elementCount , true , false ) ; } catch ( HsqlException caught ) { ex = caught ; } if ( ex != null ) { /* disable 4 lines . . . try { readExpression ( exprList , parseList , i , elementCount , true ) ; } catch ( HsqlException ex ) { . . . disabled 4 lines */ // End of VoltDB extension ex . setLevel ( compileContext . subQueryDepth ) ; if ( lastError == null || lastError . getLevel ( ) < ex . getLevel ( ) ) { lastError = ex ; } rewind ( position ) ; exprList . setSize ( expressionCount ) ; for ( int j = i ; j < i + elementCount ; j ++ ) { if ( parseList [ j ] == Tokens . QUESTION || parseList [ j ] == Tokens . X_KEYSET || parseList [ j ] == Tokens . X_POS_INTEGER ) { exprList . add ( null ) ; } } i += elementCount - 1 ; continue ; } if ( initialExprIndex == exprList . size ( ) ) { exprList . add ( null ) ; } i += elementCount - 1 ; continue ; } case Tokens . X_REPEAT : { i ++ ; int elementCount = parseList [ i ++ ] ; int parseIndex = i ; while ( true ) { int initialExprIndex = exprList . size ( ) ; // A VoltDB extension to avoid using exceptions for flow control . if ( preferToThrow ) { readExpression ( exprList , parseList , parseIndex , elementCount , true , true ) ; } else { HsqlException ex = null ; try { ex = readExpression ( exprList , parseList , parseIndex , elementCount , true , false ) ; } catch ( HsqlException caught ) { ex = caught ; } if ( ex != null ) { // TODO : There is likely a more elegant pre - emptive way of handling // the inevitable close paren that properly terminates a repeating group . // This filtering probably masks / ignores some syntax errors such as // a trailing comma right before the paren . if ( ex . getMessage ( ) . equalsIgnoreCase ( "unexpected token: )" ) ) { break ; } return ex ; } } /* disable 2 lines . . . readExpression ( exprList , parseList , parseIndex , elementCount , true ) ; . . . disabled 2 lines */ // End of VoltDB extension if ( exprList . size ( ) == initialExprIndex ) { break ; } } i += elementCount - 1 ; continue ; } case Tokens . X_KEYSET : { int elementCount = parseList [ ++ i ] ; Expression e = null ; if ( ArrayUtil . find ( parseList , token . tokenType , i + 1 , elementCount ) == - 1 ) { if ( ! isOption ) { // A VoltDB extension to avoid using exceptions for flow control . if ( ! preferToThrow ) { return unexpectedToken ( ) ; } // End of VoltDB extension throw unexpectedToken ( ) ; } } else { e = new ExpressionValue ( ValuePool . getInt ( token . tokenType ) , Type . SQL_INTEGER ) ; read ( ) ; } exprList . add ( e ) ; i += elementCount ; continue ; } case Tokens . OPENBRACKET : case Tokens . CLOSEBRACKET : case Tokens . COMMA : default : if ( token . tokenType != exprType ) { // A VoltDB extension to avoid using exceptions for flow control . if ( ! preferToThrow ) { return unexpectedToken ( ) ; } // End of VoltDB extension throw unexpectedToken ( ) ; } read ( ) ; continue ; } } // A VoltDB extension to avoid using exceptions for flow control . return null ; // Successful return - - no exception to pass back . // End of VoltDB extension
public class Admin { /** * @ throws PageException */ private void doGetTLDs ( ) throws PageException { } }
lucee . runtime . type . Query qry = new QueryImpl ( new String [ ] { "displayname" , "namespace" , "namespaceseparator" , "shortname" , "type" , "description" , "uri" , "elclass" , "elBundleName" , "elBundleVersion" , "source" } , new String [ ] { "varchar" , "varchar" , "varchar" , "varchar" , "varchar" , "varchar" , "varchar" , "varchar" , "varchar" , "varchar" , "varchar" } , 0 , "tlds" ) ; int dialect = "lucee" . equalsIgnoreCase ( getString ( "dialect" , "cfml" ) ) ? CFMLEngine . DIALECT_LUCEE : CFMLEngine . DIALECT_CFML ; TagLib [ ] libs = config . getTLDs ( dialect ) ; for ( int i = 0 ; i < libs . length ; i ++ ) { qry . addRow ( ) ; qry . setAt ( "displayname" , i + 1 , libs [ i ] . getDisplayName ( ) ) ; qry . setAt ( "namespace" , i + 1 , libs [ i ] . getNameSpace ( ) ) ; qry . setAt ( "namespaceseparator" , i + 1 , libs [ i ] . getNameSpaceSeparator ( ) ) ; qry . setAt ( "shortname" , i + 1 , libs [ i ] . getShortName ( ) ) ; qry . setAt ( "type" , i + 1 , libs [ i ] . getType ( ) ) ; qry . setAt ( "description" , i + 1 , libs [ i ] . getDescription ( ) ) ; qry . setAt ( "uri" , i + 1 , Caster . toString ( libs [ i ] . getUri ( ) ) ) ; qry . setAt ( "elclass" , i + 1 , libs [ i ] . getELClassDefinition ( ) . getClassName ( ) ) ; qry . setAt ( "elBundleName" , i + 1 , libs [ i ] . getELClassDefinition ( ) . getName ( ) ) ; qry . setAt ( "elBundleVersion" , i + 1 , libs [ i ] . getELClassDefinition ( ) . getVersionAsString ( ) ) ; qry . setAt ( "source" , i + 1 , StringUtil . emptyIfNull ( libs [ i ] . getSource ( ) ) ) ; } pageContext . setVariable ( getString ( "admin" , action , "returnVariable" ) , qry ) ;
public class FieldType { /** * Call through to { @ link DatabaseFieldConfig # getColumnDefinition ( ) } and * { @ link DatabaseFieldConfig # getFullColumnDefinition ( ) } . */ public String getColumnDefinition ( ) { } }
String full = fieldConfig . getFullColumnDefinition ( ) ; if ( full == null ) { return fieldConfig . getColumnDefinition ( ) ; } else { return full ; }
public class ContextFromVertx { /** * Like { @ link # parameter ( String , String ) } , but converts the * parameter to Boolean if found . * The parameter is decoded by default . * @ param name The name of the parameter * @ param defaultValue A default value if parameter not found . * @ return The value of the parameter of the defaultValue if not found . */ @ Override public Boolean parameterAsBoolean ( String name , boolean defaultValue ) { } }
return request . parameterAsBoolean ( name , defaultValue ) ;
public class AbstractBean { /** * Initializes the bean and its metadata */ @ Override public void internalInitialize ( BeanDeployerEnvironment environment ) { } }
preInitialize ( ) ; BeanLogger . LOG . creatingBean ( getType ( ) ) ; if ( getScope ( ) != null ) { proxyRequired = isNormalScoped ( ) ; } else { proxyRequired = false ; } BeanLogger . LOG . qualifiersUsed ( getQualifiers ( ) , this ) ; BeanLogger . LOG . usingName ( getName ( ) , this ) ; BeanLogger . LOG . usingScope ( getScope ( ) , this ) ;
public class KeyVaultClientBaseImpl { /** * List storage accounts managed by the specified key vault . This operation requires the storage / list permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param maxresults Maximum number of results to return in a page . If not specified the service will return up to 25 results . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < StorageAccountItem > > getStorageAccountsAsync ( final String vaultBaseUrl , final Integer maxresults , final ListOperationCallback < StorageAccountItem > serviceCallback ) { } }
return AzureServiceFuture . fromPageResponse ( getStorageAccountsSinglePageAsync ( vaultBaseUrl , maxresults ) , new Func1 < String , Observable < ServiceResponse < Page < StorageAccountItem > > > > ( ) { @ Override public Observable < ServiceResponse < Page < StorageAccountItem > > > call ( String nextPageLink ) { return getStorageAccountsNextSinglePageAsync ( nextPageLink ) ; } } , serviceCallback ) ;
public class SerializationTools { /** * Object to byte array */ public static byte [ ] toByteArray ( Serializable object ) { } }
try { ByteArrayOutputStream buf = new ByteArrayOutputStream ( 1024 ) ; ObjectOutputStream objOut = new ObjectOutputStream ( buf ) ; objOut . writeObject ( object ) ; objOut . close ( ) ; return buf . toByteArray ( ) ; } catch ( IOException e ) { throw new IllegalArgumentException ( "Cannot serialize object : " + e . toString ( ) ) ; }
public class SVNController { /** * Change log issues */ @ RequestMapping ( value = "changelog/{uuid}/issues" , method = RequestMethod . GET ) public SVNChangeLogIssues changeLogIssues ( @ PathVariable String uuid ) { } }
// Gets the change log SVNChangeLog changeLog = getChangeLog ( uuid ) ; // Cached ? SVNChangeLogIssues issues = changeLog . getIssues ( ) ; if ( issues != null ) { return issues ; } // Loads the issues issues = changeLogService . getChangeLogIssues ( changeLog ) ; // Stores in cache logCache . put ( uuid , changeLog . withIssues ( issues ) ) ; // OK return issues ;
public class RandomUtil { /** * 获得指定范围内的随机数 * @ param min 最小数 ( 包含 ) * @ param max 最大数 ( 不包含 ) * @ return 随机数 * @ since 4.0.9 */ public static BigDecimal randomBigDecimal ( BigDecimal min , BigDecimal max ) { } }
return NumberUtil . toBigDecimal ( getRandom ( ) . nextDouble ( min . doubleValue ( ) , max . doubleValue ( ) ) ) ;
public class BasicScaling { /** * < pre > * Duration of time after the last request that an instance must wait before * the instance is shut down . * < / pre > * < code > . google . protobuf . Duration idle _ timeout = 1 ; < / code > */ public com . google . protobuf . Duration getIdleTimeout ( ) { } }
return idleTimeout_ == null ? com . google . protobuf . Duration . getDefaultInstance ( ) : idleTimeout_ ;
public class AssignInstanceRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AssignInstanceRequest assignInstanceRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( assignInstanceRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( assignInstanceRequest . getInstanceId ( ) , INSTANCEID_BINDING ) ; protocolMarshaller . marshall ( assignInstanceRequest . getLayerIds ( ) , LAYERIDS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class WebMonitorEndpoint { @ Override public void grantLeadership ( final UUID leaderSessionID ) { } }
log . info ( "{} was granted leadership with leaderSessionID={}" , getRestBaseUrl ( ) , leaderSessionID ) ; leaderElectionService . confirmLeaderSessionID ( leaderSessionID ) ;
public class JCROrganizationServiceImpl { /** * { @ inheritDoc } */ @ Override public void start ( ) { } }
try { MigrationTool migrationTool = new MigrationTool ( this ) ; if ( migrationTool . migrationRequired ( ) ) { LOG . info ( "Detected old organization service structure." ) ; migrationTool . migrate ( ) ; } Session session = getStorageSession ( ) ; try { session . getItem ( this . storagePath ) ; // if found do nothing , the storage was initialized before . } catch ( PathNotFoundException e ) { createStructure ( ) ; } finally { session . logout ( ) ; } } catch ( RepositoryException e ) { throw new IllegalArgumentException ( "Can not configure storage" , e ) ; } super . start ( ) ;
public class FixedQuantilesCallback { /** * Create buckets using callback attributes * @ param stopwatch Target stopwatch * @ return Created buckets */ @ Override protected Buckets createBuckets ( Stopwatch stopwatch ) { } }
return createBuckets ( stopwatch , min * SimonClock . NANOS_IN_MILLIS , max * SimonClock . NANOS_IN_MILLIS , bucketNb ) ;
public class DeepWaterParameters { /** * Attempt to guess the problem type from the dataset * @ return */ ProblemType guessProblemType ( ) { } }
if ( _problem_type == auto ) { boolean image = false ; boolean text = false ; String first = null ; Vec v = train ( ) . vec ( 0 ) ; if ( v . isString ( ) || v . isCategorical ( ) /* small data parser artefact */ ) { BufferedString bs = new BufferedString ( ) ; first = v . atStr ( bs , 0 ) . toString ( ) ; try { ImageIO . read ( new File ( first ) ) ; image = true ; } catch ( Throwable t ) { } try { ImageIO . read ( new URL ( first ) ) ; image = true ; } catch ( Throwable t ) { } } if ( first != null ) { if ( ! image && ( first . endsWith ( ".jpg" ) || first . endsWith ( ".png" ) || first . endsWith ( ".tif" ) ) ) { image = true ; Log . warn ( "Cannot read first image at " + first + " - Check data." ) ; } else if ( v . isString ( ) && train ( ) . numCols ( ) <= 4 ) { // at most text , label , fold _ col , weight text = true ; } } if ( image ) return ProblemType . image ; else if ( text ) return ProblemType . text ; else return ProblemType . dataset ; } else { return _problem_type ; }
public class CommercePriceListAccountRelLocalServiceUtil { /** * Adds the commerce price list account rel to the database . Also notifies the appropriate model listeners . * @ param commercePriceListAccountRel the commerce price list account rel * @ return the commerce price list account rel that was added */ public static com . liferay . commerce . price . list . model . CommercePriceListAccountRel addCommercePriceListAccountRel ( com . liferay . commerce . price . list . model . CommercePriceListAccountRel commercePriceListAccountRel ) { } }
return getService ( ) . addCommercePriceListAccountRel ( commercePriceListAccountRel ) ;
public class HttpServerMBean { public void postDeregister ( ) { } }
_httpServer . removeEventListener ( this ) ; _httpServer = null ; if ( _mbeanMap != null ) _mbeanMap . clear ( ) ; _mbeanMap = null ; super . postDeregister ( ) ;
public class AbstractTreeWriter { /** * Get the tree label for the navigation bar . * @ return a content tree for the tree label */ protected Content getNavLinkTree ( ) { } }
Content li = HtmlTree . LI ( HtmlStyle . navBarCell1Rev , treeLabel ) ; return li ;
public class PlaybackConfigurationMarshaller { /** * Marshall the given parameter object . */ public void marshall ( PlaybackConfiguration playbackConfiguration , ProtocolMarshaller protocolMarshaller ) { } }
if ( playbackConfiguration == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( playbackConfiguration . getAdDecisionServerUrl ( ) , ADDECISIONSERVERURL_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getCdnConfiguration ( ) , CDNCONFIGURATION_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getDashConfiguration ( ) , DASHCONFIGURATION_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getHlsConfiguration ( ) , HLSCONFIGURATION_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getPlaybackConfigurationArn ( ) , PLAYBACKCONFIGURATIONARN_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getPlaybackEndpointPrefix ( ) , PLAYBACKENDPOINTPREFIX_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getSessionInitializationEndpointPrefix ( ) , SESSIONINITIALIZATIONENDPOINTPREFIX_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getSlateAdUrl ( ) , SLATEADURL_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getTags ( ) , TAGS_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getTranscodeProfileName ( ) , TRANSCODEPROFILENAME_BINDING ) ; protocolMarshaller . marshall ( playbackConfiguration . getVideoContentSourceUrl ( ) , VIDEOCONTENTSOURCEURL_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class PathUtils { /** * Converts a String array to a String with coma separator * example : String [ " a . soy " , " b . soy " ] - output : a . soy , b . soy * @ param array - array * @ return comma separated list */ public static String arrayToPath ( final String [ ] array ) { } }
if ( array == null ) { return "" ; } return Joiner . on ( "," ) . skipNulls ( ) . join ( array ) ;
public class ArrowConverter { /** * Convert a field vector to a column vector * @ param fieldVector the field vector to convert * @ param type the type of the column vector * @ return the converted ndarray */ public static INDArray convertArrowVector ( FieldVector fieldVector , ColumnType type ) { } }
DataBuffer buffer = null ; int cols = fieldVector . getValueCount ( ) ; ByteBuffer direct = ByteBuffer . allocateDirect ( fieldVector . getDataBuffer ( ) . capacity ( ) ) ; direct . order ( ByteOrder . nativeOrder ( ) ) ; fieldVector . getDataBuffer ( ) . getBytes ( 0 , direct ) ; direct . rewind ( ) ; switch ( type ) { case Integer : buffer = Nd4j . createBuffer ( direct , DataType . INT , cols , 0 ) ; break ; case Float : buffer = Nd4j . createBuffer ( direct , DataType . FLOAT , cols ) ; break ; case Double : buffer = Nd4j . createBuffer ( direct , DataType . DOUBLE , cols ) ; break ; case Long : buffer = Nd4j . createBuffer ( direct , DataType . LONG , cols ) ; break ; } return Nd4j . create ( buffer , new int [ ] { cols , 1 } ) ;
public class TermsByQueryResponse { /** * Serialize * @ param out the output * @ throws IOException */ @ Override public void writeTo ( StreamOutput out ) throws IOException { } }
super . writeTo ( out ) ; // Encode flag out . writeBoolean ( isPruned ) ; // Encode size out . writeVInt ( size ) ; // Encode type of encoding out . writeVInt ( termsEncoding . ordinal ( ) ) ; // Encode terms out . writeBytesRef ( encodedTerms ) ; // Release terms encodedTerms = null ;
public class EvaluationUtils { /** * Calculate the false positive rate from the false positive count and true negative count * @ param fpCount False positive count * @ param tnCount True negative count * @ param edgeCase Edge case values are used to avoid 0/0 * @ return False positive rate */ public static double falsePositiveRate ( long fpCount , long tnCount , double edgeCase ) { } }
// Edge case if ( fpCount == 0 && tnCount == 0 ) { return edgeCase ; } return fpCount / ( double ) ( fpCount + tnCount ) ;
public class Kmeans { /** * { @ inheritDoc } */ @ Override public Prediction _predictRecord ( Record r ) { } }
ModelParameters modelParameters = knowledgeBase . getModelParameters ( ) ; Map < Integer , Cluster > clusterMap = modelParameters . getClusterMap ( ) ; AssociativeArray clusterDistances = new AssociativeArray ( ) ; for ( Map . Entry < Integer , Cluster > e : clusterMap . entrySet ( ) ) { Integer clusterId = e . getKey ( ) ; Cluster c = e . getValue ( ) ; double distance = calculateDistance ( r , c . getCentroid ( ) ) ; clusterDistances . put ( clusterId , distance ) ; } Descriptives . normalize ( clusterDistances ) ; return new Prediction ( getSelectedClusterFromDistances ( clusterDistances ) , clusterDistances ) ;
public class AvroUtils { /** * Given a byte array and a DatumReader , decode an avro entity from the byte * array . Decodes using the avro BinaryDecoder . Return the constructed entity . * @ param bytes * The byte array to decode the entity from . * @ param reader * The DatumReader that will decode the byte array . * @ return The Avro entity . */ public static < T > T readAvroEntity ( byte [ ] bytes , DatumReader < T > reader ) { } }
Decoder decoder = new DecoderFactory ( ) . binaryDecoder ( bytes , null ) ; return AvroUtils . < T > readAvroEntity ( decoder , reader ) ;
public class BaseWorkflowExecutor { /** * Execute a step and handle flow control and error handler and log filter . * @ param executionContext context * @ param failedMap map for placing failure results * @ param resultList list of step results * @ param keepgoing true if the workflow should keepgoing on error * @ param wlistener listener * @ param c step number * @ param cmd step * @ return result and flow control */ public StepResultCapture executeWorkflowStep ( final StepExecutionContext executionContext , final Map < Integer , StepExecutionResult > failedMap , final List < StepExecutionResult > resultList , final boolean keepgoing , final WorkflowExecutionListener wlistener , final int c , final StepExecutionItem cmd ) { } }
if ( null != wlistener ) { wlistener . beginWorkflowItem ( c , cmd ) ; } // collab WorkflowStatusResultImpl result = WorkflowStatusResultImpl . builder ( ) . success ( false ) . build ( ) ; // wrap node failed listener ( if any ) and capture status results NodeRecorder stepCaptureFailedNodesListener = new NodeRecorder ( ) ; // create the new context for workflow execution ExecutionContextImpl . Builder wfRunContext = new ExecutionContextImpl . Builder ( executionContext ) ; replaceFailedNodesListenerInContext ( wfRunContext , stepCaptureFailedNodesListener , executionContext . getExecutionListener ( ) ) ; final FlowController stepController = new FlowController ( ) ; wfRunContext . flowControl ( stepController ) ; final DataOutput outputContext = new DataOutput ( ContextView . step ( c ) ) ; wfRunContext . outputContext ( outputContext ) ; ExecutionContextImpl wfRunContextBuilt = wfRunContext . build ( ) ; // execute the step item , and store the results final Map < Integer , StepExecutionResult > stepFailedMap = new HashMap < > ( ) ; StepExecutionResult stepResult = executeWFItem ( wfRunContextBuilt , stepFailedMap , c , cmd ) ; // node recorder report Map < String , NodeStepResult > nodeFailures = stepCaptureFailedNodesListener . getFailedNodes ( ) ; reportNodesMatched ( executionContext , stepCaptureFailedNodesListener ) ; // collect node data results WFSharedContext combinedResultData = new WFSharedContext ( ) ; combineResultData ( c , outputContext , combinedResultData , stepResult ) ; if ( stepController . isControlled ( ) ) { result = WorkflowStatusResultImpl . with ( stepController ) ; stepResult = controlledStepResult ( stepController , stepResult ) ; executionContext . getExecutionListener ( ) . log ( 3 , result . toString ( ) ) ; } result . setSuccess ( stepResult . isSuccess ( ) ) ; try { if ( ! result . isSuccess ( ) && cmd instanceof HasFailureHandler ) { final HasFailureHandler handles = ( HasFailureHandler ) cmd ; final StepExecutionItem handler = handles . getFailureHandler ( ) ; if ( null != handler ) { // if there is a failure , and a failureHandler item , execute the failure handler // set keepgoing = false , and store the results // will throw an exception on failure because keepgoing = false NodeRecorder handlerCaptureFailedNodesListener = new NodeRecorder ( ) ; ExecutionContextImpl . Builder wfHandlerContext = new ExecutionContextImpl . Builder ( executionContext ) ; replaceFailedNodesListenerInContext ( wfHandlerContext , handlerCaptureFailedNodesListener , executionContext . getExecutionListener ( ) ) ; // if multi - node , determine set of nodes to run handler on : ( failed node list only ) if ( stepCaptureFailedNodesListener . getMatchedNodes ( ) . size ( ) > 1 ) { HashSet < String > failedNodeList = new HashSet < > ( stepCaptureFailedNodesListener . getFailedNodes ( ) . keySet ( ) ) ; wfHandlerContext . nodeSelector ( SelectorUtils . nodeList ( failedNodeList ) ) ; } // add step failure data to data context addStepFailureContextData ( stepResult , wfHandlerContext ) ; // extract node - specific failure and set as node - context data addNodeStepFailureContextData ( stepResult , wfHandlerContext ) ; // add in data context results produced by the step wfHandlerContext . mergeSharedContext ( combinedResultData ) ; // allow flow control final FlowController handlerController = new FlowController ( ) ; wfHandlerContext . flowControl ( handlerController ) ; wfHandlerContext . outputContext ( outputContext ) ; Map < Integer , StepExecutionResult > handlerFailedMap = new HashMap < > ( ) ; if ( null != wlistener ) { wlistener . beginWorkflowItemErrorHandler ( c , cmd ) ; } StepExecutionResult handlerResult = executeWFItem ( wfHandlerContext . build ( ) , handlerFailedMap , c , handler ) ; boolean handlerSuccess = handlerResult . isSuccess ( ) ; if ( null != wlistener ) { wlistener . finishWorkflowItemErrorHandler ( c , cmd , handlerResult ) ; } // combine handler result data combineResultData ( c , outputContext , combinedResultData , handlerResult ) ; if ( handlerController . isControlled ( ) && handlerController . getControlBehavior ( ) == ControlBehavior . Halt ) { // handler called Halt ( ) result = WorkflowStatusResultImpl . with ( handlerController ) ; executionContext . getExecutionListener ( ) . log ( 3 , result . toString ( ) ) ; } else { // handle success conditions : // 1 . if keepgoing = true , then status from handler overrides original step // 2 . keepgoing = false , then status is the same as the original step , unless // the keepgoingOnSuccess is set to true and the handler succeeded boolean useHandlerResults = keepgoing ; if ( ! keepgoing && handlerSuccess && handler instanceof HandlerExecutionItem ) { useHandlerResults = ( ( HandlerExecutionItem ) handler ) . isKeepgoingOnSuccess ( ) ; } if ( useHandlerResults ) { result . setSuccess ( handlerSuccess ) ; stepResult = handlerResult ; stepFailedMap . clear ( ) ; stepFailedMap . putAll ( handlerFailedMap ) ; nodeFailures = handlerCaptureFailedNodesListener . getFailedNodes ( ) ; } } } } } catch ( RuntimeException t ) { // t . printStackTrace ( System . err ) ; stepResult = new StepExecutionResultImpl ( t , StepFailureReason . Unknown , t . getMessage ( ) ) ; throw t ; } finally { if ( null != wlistener ) { wlistener . finishWorkflowItem ( c , cmd , stepResult ) ; } } // report data resultList . add ( stepResult ) ; failedMap . putAll ( stepFailedMap ) ; // report node failures based on results of step and handler run . if ( null != executionContext . getExecutionListener ( ) && null != executionContext . getExecutionListener ( ) . getFailedNodesListener ( ) ) { if ( nodeFailures . size ( ) > 0 ) { executionContext . getExecutionListener ( ) . getFailedNodesListener ( ) . nodesFailed ( nodeFailures ) ; } else if ( result . isSuccess ( ) ) { executionContext . getExecutionListener ( ) . getFailedNodesListener ( ) . nodesSucceeded ( ) ; } } return new StepResultCapture ( stepResult , result , combinedResultData ) ;
public class ScanPackagesProcess { /** * Run Method . */ public void run ( ) { } }
Packages recPackages = ( Packages ) this . getMainRecord ( ) ; ProgramControl recPackagesControl = ( ProgramControl ) this . getRecord ( ProgramControl . PROGRAM_CONTROL_FILE ) ; try { recPackagesControl . edit ( ) ; recPackagesControl . getField ( ProgramControl . LAST_PACKAGE_UPDATE ) . setValue ( DateTimeField . currentTime ( ) ) ; this . scanProjects ( 0 ) ; recPackages . setKeyArea ( Packages . ID_KEY ) ; recPackages . close ( ) ; while ( recPackages . hasNext ( ) ) { recPackages . next ( ) ; Date timePackages = ( ( DateTimeField ) recPackages . getField ( Packages . LAST_UPDATED ) ) . getDateTime ( ) ; Date timeUpdated = ( ( DateTimeField ) recPackagesControl . getField ( ProgramControl . LAST_PACKAGE_UPDATE ) ) . getDateTime ( ) ; boolean bManual = recPackages . getField ( Packages . MANUAL ) . getState ( ) ; if ( ( ( timePackages == null ) || ( timePackages . before ( timeUpdated ) ) ) && ( bManual == false ) ) { recPackages . edit ( ) ; recPackages . remove ( ) ; } } recPackagesControl . set ( ) ; } catch ( DBException ex ) { ex . printStackTrace ( ) ; } finally { recPackagesControl . free ( ) ; }
public class Vector3i { /** * Subtract the supplied vector from this one and store the result in * < code > this < / code > . * @ param v * the vector to subtract * @ return a vector holding the result */ public Vector3i sub ( Vector3ic v ) { } }
return sub ( v . x ( ) , v . y ( ) , v . z ( ) , thisOrNew ( ) ) ;
public class FileSourceSettingsMarshaller { /** * Marshall the given parameter object . */ public void marshall ( FileSourceSettings fileSourceSettings , ProtocolMarshaller protocolMarshaller ) { } }
if ( fileSourceSettings == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( fileSourceSettings . getConvert608To708 ( ) , CONVERT608TO708_BINDING ) ; protocolMarshaller . marshall ( fileSourceSettings . getSourceFile ( ) , SOURCEFILE_BINDING ) ; protocolMarshaller . marshall ( fileSourceSettings . getTimeDelta ( ) , TIMEDELTA_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class GeoServiceBounds { /** * Build a bounding box feature . * @ param id the ID string * @ param southwest the southwest corner of the bounding box * @ param northeast the northeast corner of the bounding box * @ return the Feature for the new bounds */ public static Feature createBounds ( final String id , final LngLatAlt southwest , final LngLatAlt northeast ) { } }
final Feature feature = new Feature ( ) ; feature . setId ( id ) ; if ( northeast == null || southwest == null ) { throw new IllegalArgumentException ( "Must have a proper bounding box" ) ; } final double [ ] bbox = { southwest . getLongitude ( ) , southwest . getLatitude ( ) , northeast . getLongitude ( ) , northeast . getLatitude ( ) } ; final Polygon polygon = new Polygon ( ) ; feature . setGeometry ( polygon ) ; polygon . setBbox ( bbox ) ; final List < LngLatAlt > elements = new ArrayList < > ( 5 ) ; elements . add ( new LngLatAlt ( bbox [ SW_LNG ] , bbox [ SW_LAT ] ) ) ; elements . add ( new LngLatAlt ( bbox [ NE_LNG ] , bbox [ SW_LAT ] ) ) ; elements . add ( new LngLatAlt ( bbox [ NE_LNG ] , bbox [ NE_LAT ] ) ) ; elements . add ( new LngLatAlt ( bbox [ SW_LNG ] , bbox [ NE_LAT ] ) ) ; elements . add ( new LngLatAlt ( bbox [ SW_LNG ] , bbox [ SW_LAT ] ) ) ; polygon . add ( elements ) ; feature . setBbox ( bbox ) ; return feature ;
public class Exec { /** * Execute a command in a container . If there are multiple containers in the pod , uses the first * container in the Pod . * @ param pod The pod where the command is run . * @ param command The command to run * @ param container The container in the Pod where the command is run . * @ param stdin If true , pass a stdin stream into the container . * @ param tty If true , stdin is a TTY ( only applies if stdin is true ) */ public Process exec ( V1Pod pod , String [ ] command , String container , boolean stdin , boolean tty ) throws ApiException , IOException { } }
return exec ( pod . getMetadata ( ) . getNamespace ( ) , pod . getMetadata ( ) . getName ( ) , command , container , stdin , tty ) ;
public class LinkedDeque { /** * Unlinks the non - null element . */ private void unlink ( E e ) { } }
final E prev = e . getPrevious ( ) ; final E next = e . getNext ( ) ; if ( prev == null ) { first = next ; } else { prev . setNext ( next ) ; e . setPrevious ( null ) ; } if ( next == null ) { last = prev ; } else { next . setPrevious ( prev ) ; e . setNext ( null ) ; }
public class MediaType { /** * Sorts the given list of { @ code MediaType } objects by specificity . * < p > Given two media types : < ol > < li > if either media type has a { @ linkplain # isWildcardType ( ) wildcard type } , then * the media type without the wildcard is ordered before the other . < / li > < li > if the two media types have different * { @ linkplain # getType ( ) types } , then they are considered equal and remain their current order . < / li > < li > if either * media type has a { @ linkplain # isWildcardSubtype ( ) wildcard subtype } , then the media type without the wildcard is * sorted before the other . < / li > < li > if the two media types have different { @ linkplain # getSubtype ( ) subtypes } , then * they are considered equal and remain their current order . < / li > < li > if the two media types have different * { @ linkplain # getQualityValue ( ) quality value } , then the media type with the highest quality value is ordered * before the other . < / li > < li > if the two media types have a different amount of { @ linkplain # getParameter ( String ) * parameters } , then the media type with the most parameters is ordered before the other . < / li > < / ol > < p > For example : * < blockquote > audio / basic & lt ; audio / * & lt ; * & # 047 ; * < / blockquote > < blockquote > audio / * & lt ; audio / * ; q = 0.7; * audio / * ; q = 0.3 < / blockquote > < blockquote > audio / basic ; level = 1 & lt ; audio / basic < / blockquote > < blockquote > audio / basic * = = text / html < / blockquote > < blockquote > audio / basic = = audio / wave < / blockquote > * @ param mediaTypes the list of media types to be sorted . * @ see < a href = " http : / / tools . ietf . org / html / rfc7231 # section - 5.3.2 " > HTTP 1.1 : Semantics and Content , section * 5.3.2 < / a > */ public static void sortBySpecificity ( List < MediaType > mediaTypes ) { } }
Assert . notNull ( mediaTypes , "'mediaTypes' must not be null" ) ; if ( mediaTypes . size ( ) > 1 ) { Collections . sort ( mediaTypes , SPECIFICITY_COMPARATOR ) ; }
public class UtilDecompositons_ZDRM { /** * Creates a zeros matrix only if A does not already exist . If it does exist it will fill * the lower triangular portion with zeros . */ public static ZMatrixRMaj checkZerosLT ( ZMatrixRMaj A , int numRows , int numCols ) { } }
if ( A == null ) { return new ZMatrixRMaj ( numRows , numCols ) ; } else if ( numRows != A . numRows || numCols != A . numCols ) throw new IllegalArgumentException ( "Input is not " + numRows + " x " + numCols + " matrix" ) ; else { for ( int i = 0 ; i < A . numRows ; i ++ ) { int index = i * A . numCols * 2 ; int end = index + Math . min ( i , A . numCols ) * 2 ; ; while ( index < end ) { A . data [ index ++ ] = 0 ; } } } return A ;
public class BucketTimeSeries { /** * This method returns the value from the time - series as if it would be ordered ( i . e . , zero is now , 1 is the * previous moment , . . . ) . * @ param idx the zero based index * @ return the value associated to the zero based index */ public T getFromZeroBasedIdx ( final int idx ) { } }
if ( this . timeSeries != null && this . currentNowIdx != - 1 ) { // we can use the default validation , because the index still must be in the borders of the time - series validateIdx ( idx ) ; return get ( idx ( currentNowIdx + idx ) ) ; } else { return null ; }
public class RelativePathService { /** * Installs a path service . * @ param name the name to use for the service * @ param path the relative portion of the path * @ param possiblyAbsolute { @ code true } if { @ code path } may be an { @ link # isAbsoluteUnixOrWindowsPath ( String ) absolute path } * and should be { @ link AbsolutePathService installed as such } if it is , with any * { @ code relativeTo } parameter ignored * @ param relativeTo the name of the path that { @ code path } may be relative to * @ param serviceTarget the { @ link ServiceTarget } to use to install the service * @ return the ServiceController for the path service */ public static ServiceController < String > addService ( final ServiceName name , final String path , boolean possiblyAbsolute , final String relativeTo , final ServiceTarget serviceTarget ) { } }
if ( possiblyAbsolute && isAbsoluteUnixOrWindowsPath ( path ) ) { return AbsolutePathService . addService ( name , path , serviceTarget ) ; } RelativePathService service = new RelativePathService ( path ) ; ServiceBuilder < String > builder = serviceTarget . addService ( name , service ) . addDependency ( pathNameOf ( relativeTo ) , String . class , service . injectedPath ) ; ServiceController < String > svc = builder . install ( ) ; return svc ;
public class StylesheetUserPreferencesImpl { /** * / * ( non - Javadoc ) * @ see org . apereo . portal . layout . om . IStylesheetUserPreferences # removeLayoutAttribute ( java . lang . String , java . lang . String ) */ @ Override public String removeLayoutAttribute ( String nodeId , String name ) { } }
Validate . notEmpty ( nodeId , "nodeId cannot be null" ) ; Validate . notEmpty ( name , "name cannot be null" ) ; final Map < String , String > nodeAttributes = this . layoutAttributes . get ( nodeId ) ; if ( nodeAttributes == null ) { return null ; } return nodeAttributes . remove ( name ) ;
public class SourceLocation { /** * Finds all files with the given suffix that pass the include / exclude * filters in this source location . * @ param suffixes The set of suffixes to search for * @ param foundFiles The map in which to store the found files * @ param foundModules The map in which to store the found modules * @ param currentModule The current module * @ param permitSourcesInDefaultPackage true if sources in default package * are to be permitted * @ param inLinksrc true if in link source */ public void findSourceFiles ( Set < String > suffixes , Map < String , Source > foundFiles , Map < String , Module > foundModules , Module currentModule , boolean permitSourcesInDefaultPackage , boolean inLinksrc ) throws IOException { } }
try { Source . scanRoot ( path . toFile ( ) , suffixes , excludes , includes , foundFiles , foundModules , currentModule , permitSourcesInDefaultPackage , false , inLinksrc ) ; } catch ( ProblemException e ) { e . printStackTrace ( ) ; }
public class MQLinkPubSubBridgeItemStream { /** * Method isToBeDeleted . * @ return boolean */ public boolean isToBeDeleted ( ) { } }
if ( tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "isToBeDeleted" ) ; SibTr . exit ( tc , "isToBeDeleted" , toBeDeleted ) ; } return toBeDeleted . booleanValue ( ) ;
public class UsableURIFactory { /** * Fixup the domain label part of the authority . * We ' re more lax than the spec . in that we allow underscores . * @ param label Domain label to fix . * @ return Return fixed domain label . * @ throws URIException */ private String fixupDomainlabel ( String label ) throws URIException { } }
// apply IDN - punycoding , as necessary try { // TODO : optimize : only apply when necessary , or // keep cache of recent encodings label = IDNA . toASCII ( label ) ; } catch ( IDNAException e ) { if ( TextUtils . matches ( ACCEPTABLE_ASCII_DOMAIN , label ) ) { // domain name has ACE prefix , leading / trailing dash , or // underscore - - but is still a name we wish to tolerate ; // simply continue } else { // problematic domain : neither ASCII acceptable characters // nor IDN - punycodable , so throw exception // TODO : change to HeritrixURIException so distinguishable // from URIExceptions in library code URIException ue = new URIException ( e + " " + label ) ; ue . initCause ( e ) ; throw ue ; } } label = label . toLowerCase ( ) ; return label ;
public class NtlmUtil { /** * Accepts key multiple of 7 * Returns enc multiple of 8 * Multiple is the same like : 21 byte key gives 24 byte result */ static void E ( byte [ ] key , byte [ ] data , byte [ ] e ) throws ShortBufferException { } }
byte [ ] key7 = new byte [ 7 ] ; byte [ ] e8 = new byte [ 8 ] ; for ( int i = 0 ; i < key . length / 7 ; i ++ ) { System . arraycopy ( key , i * 7 , key7 , 0 , 7 ) ; Cipher des = Crypto . getDES ( key7 ) ; des . update ( data , 0 , data . length , e8 ) ; System . arraycopy ( e8 , 0 , e , i * 8 , 8 ) ; }
public class JawrWatchEventProcessor { /** * Returns true if there is no more event to process * @ return true if there is no more event to process */ public boolean hasNoEventToProcess ( ) { } }
long currentTime = Calendar . getInstance ( ) . getTimeInMillis ( ) ; return watchEvents . isEmpty ( ) && ( currentTime - lastProcessTime . get ( ) > bundlesHandler . getConfig ( ) . getSmartBundlingDelayAfterLastEvent ( ) ) ;
public class EpicsApi { /** * Deletes an epic . * < pre > < code > GitLab Endpoint : DELETE / groups / : id / epics / : epic _ iid < / code > < / pre > * @ param groupIdOrPath the group ID , path of the group , or a Group instance holding the group ID or path * @ param epicIid the IID of the epic to delete * @ throws GitLabApiException if any exception occurs */ public void deleteEpic ( Object groupIdOrPath , Integer epicIid ) throws GitLabApiException { } }
delete ( Response . Status . NO_CONTENT , null , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid ) ;
public class GVRViewManager { /** * returns . */ private void notifyMainSceneReady ( ) { } }
runOnTheFrameworkThread ( new Runnable ( ) { @ Override public void run ( ) { // Initialize the main scene getEventManager ( ) . sendEvent ( mMainScene , ISceneEvents . class , "onInit" , GVRViewManager . this , mMainScene ) ; // Late - initialize the main scene getEventManager ( ) . sendEvent ( mMainScene , ISceneEvents . class , "onAfterInit" ) ; } } ) ;
public class ReviewsImpl { /** * Get the Job Details for a Job Id . * @ param teamName Your Team Name . * @ param jobId Id of the job . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Job > getJobDetailsAsync ( String teamName , String jobId , final ServiceCallback < Job > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getJobDetailsWithServiceResponseAsync ( teamName , jobId ) , serviceCallback ) ;
public class ScriptRunner { /** * Prepares a < code > Task < / code > for ( subsequent ) execution . * @ param location Absolute or relative location of a Cernunnos script file . */ public Task compileTask ( String location ) { } }
// Assertions . if ( location == null ) { String msg = "Argument 'location' cannot be null." ; throw new IllegalArgumentException ( msg ) ; } Document doc = null ; URL origin = null ; try { origin = new URL ( new File ( "." ) . toURI ( ) . toURL ( ) , location ) ; doc = new SAXReader ( ) . read ( origin ) ; } catch ( Throwable t ) { String msg = "Error reading a script from the specified location: " + location ; throw new RuntimeException ( msg , t ) ; } return new TaskDecorator ( grammar . newTask ( doc . getRootElement ( ) , null ) , origin . toExternalForm ( ) ) ;
public class AbstractAdminController { /** * sets common context variables < br > * < ul > * < li > contentPage < / li > * < li > activeMenuName < / li > * < li > activeMenu < / li > * < li > rootContext < / li > * < li > < / li > * < / ul > * @ param model * @ param request * @ return */ protected String addCommonContextVars ( ModelMap model , HttpServletRequest request ) { } }
return addCommonContextVars ( model , request , null , null ) ;
public class RegistriesInner { /** * Updates the policies for the specified container registry . * @ param resourceGroupName The name of the resource group to which the container registry belongs . * @ param registryName The name of the container registry . * @ param registryPoliciesUpdateParameters The parameters for updating policies of a container registry . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the RegistryPoliciesInner object */ public Observable < ServiceResponse < RegistryPoliciesInner > > beginUpdatePoliciesWithServiceResponseAsync ( String resourceGroupName , String registryName , RegistryPoliciesInner registryPoliciesUpdateParameters ) { } }
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( registryName == null ) { throw new IllegalArgumentException ( "Parameter registryName is required and cannot be null." ) ; } if ( registryPoliciesUpdateParameters == null ) { throw new IllegalArgumentException ( "Parameter registryPoliciesUpdateParameters is required and cannot be null." ) ; } Validator . validate ( registryPoliciesUpdateParameters ) ; final String apiVersion = "2017-10-01" ; return service . beginUpdatePolicies ( this . client . subscriptionId ( ) , resourceGroupName , registryName , apiVersion , registryPoliciesUpdateParameters , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < RegistryPoliciesInner > > > ( ) { @ Override public Observable < ServiceResponse < RegistryPoliciesInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < RegistryPoliciesInner > clientResponse = beginUpdatePoliciesDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class QuantilesCallback { /** * When a split is added , if buckets have been initialized , the value is added to appropriate bucket . */ @ Override public void onStopwatchAdd ( Stopwatch stopwatch , Split split , StopwatchSample sample ) { } }
onStopwatchSplit ( split . getStopwatch ( ) , split ) ;
public class BitZTradeServiceRaw { /** * 批量取消委托 * @ param ids * @ return * @ throws IOException */ public BitZTradeCancelListResult cancelAllEntrustSheet ( String ids ) throws IOException { } }
return bitz . cancelAllEntrustSheet ( apiKey , getTimeStamp ( ) , nonce , signer , ids ) ;
public class EnvironmentSettingsInner { /** * Create or replace an existing Environment Setting . This operation can take a while to complete . * @ param resourceGroupName The name of the resource group . * @ param labAccountName The name of the lab Account . * @ param labName The name of the lab . * @ param environmentSettingName The name of the environment Setting . * @ param environmentSetting Represents settings of an environment , from which environment instances would be created * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < EnvironmentSettingInner > beginCreateOrUpdateAsync ( String resourceGroupName , String labAccountName , String labName , String environmentSettingName , EnvironmentSettingInner environmentSetting , final ServiceCallback < EnvironmentSettingInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , labAccountName , labName , environmentSettingName , environmentSetting ) , serviceCallback ) ;
public class MainToolbar { /** * Adds the login button + login text to the toolbar . This is only happened , * when the gui is not started via the kickstarter . * The Kickstarter overrides the " kickstarterEnvironment " context parameter * and set it to " true " , so the gui can detect , that is not necessary to offer * a login button . < / p > * component . */ private void addLoginButton ( ) { } }
VaadinSession session = VaadinSession . getCurrent ( ) ; if ( session != null ) { boolean kickstarter = Helper . isKickstarter ( session ) ; if ( ! kickstarter ) { addComponent ( lblUserName ) ; setComponentAlignment ( lblUserName , Alignment . MIDDLE_RIGHT ) ; addComponent ( btLogin ) ; setComponentAlignment ( btLogin , Alignment . MIDDLE_RIGHT ) ; } }
public class BaseDataJsonFieldBo { /** * Get a " data " ' s sub - attribute using d - path . * @ param dPath * @ param clazz * @ return * @ see DPathUtils */ public < T > T getDataAttr ( String dPath , Class < T > clazz ) { } }
Lock lock = lockForRead ( ) ; try { return DPathUtils . getValue ( dataJson , dPath , clazz ) ; } finally { lock . unlock ( ) ; }
public class RepositoryResolver { /** * Resolve a single name * Identical to { @ code resolve ( Collections . singleton ( toResolve ) ) } . */ public Collection < List < RepositoryResource > > resolve ( String toResolve ) throws RepositoryResolutionException { } }
return resolve ( Collections . singleton ( toResolve ) ) ;
public class UserCoreDao { /** * Query for ordered rows starting at the offset and returning no more than * the limit . * @ param orderBy * order by * @ param limit * chunk limit * @ param offset * chunk query offset * @ return result * @ since 3.1.0 */ public TResult queryForChunk ( String orderBy , int limit , long offset ) { } }
return query ( null , null , null , null , orderBy , buildLimit ( limit , offset ) ) ;
public class HelloSignClient { /** * Retrieves the necessary information to build an embedded signature * request . * @ param signatureId String ID of the signature request to embed * @ return EmbeddedResponse * @ throws HelloSignException thrown if there ' s a problem processing the * HTTP request or the JSON response . */ public EmbeddedResponse getEmbeddedSignUrl ( String signatureId ) throws HelloSignException { } }
String url = BASE_URI + EMBEDDED_SIGN_URL_URI + "/" + signatureId ; return new EmbeddedResponse ( httpClient . withAuth ( auth ) . post ( url ) . asJson ( ) ) ;
public class ElasticsearchRestClientFactoryBean { /** * We use convention over configuration : see https : / / github . com / dadoonet / spring - elasticsearch / issues / 3 */ static String [ ] computeTemplates ( String [ ] templates , String classpathRoot ) { } }
if ( templates == null || templates . length == 0 ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Automatic discovery is activated. Looking for template files in classpath under [{}]." , classpathRoot ) ; } ArrayList < String > autoTemplates = new ArrayList < > ( ) ; try { // Let ' s scan our resources List < String > scannedTemplates = TemplateFinder . findTemplates ( classpathRoot ) ; for ( String template : scannedTemplates ) { autoTemplates . add ( template ) ; } return autoTemplates . toArray ( new String [ autoTemplates . size ( ) ] ) ; } catch ( IOException | URISyntaxException e ) { logger . debug ( "Automatic discovery does not succeed for finding json files in classpath under " + classpathRoot + "." ) ; logger . trace ( "" , e ) ; } } return templates ;
public class ZkClient { /** * Add authentication information to the connection . This will be used to identify the user and check access to * nodes protected by ACLs * @ param scheme * @ param auth */ public void addAuthInfo ( final String scheme , final byte [ ] auth ) { } }
retryUntilConnected ( new Callable < Object > ( ) { @ Override public Object call ( ) throws Exception { _connection . addAuthInfo ( scheme , auth ) ; return null ; } } ) ;
public class HBaseReader { /** * Next . * @ return the h base data */ public HBaseDataWrapper next ( ) { } }
Result result = resultsIter . next ( ) ; counter ++ ; List < Cell > cells = result . listCells ( ) ; HBaseDataWrapper data = new HBaseDataWrapper ( tableName , result . getRow ( ) ) ; data . setColumns ( cells ) ; return data ;
public class GetMountTablePResponse { /** * < code > map & lt ; string , . alluxio . grpc . file . MountPointInfo & gt ; mountPoints = 1 ; < / code > */ public boolean containsMountPoints ( java . lang . String key ) { } }
if ( key == null ) { throw new java . lang . NullPointerException ( ) ; } return internalGetMountPoints ( ) . getMap ( ) . containsKey ( key ) ;
public class Getter { /** * Checks if the specified method is a getter . * @ param method The method . * @ param acceptPrefixless Flag to enable support of prefixless getters . * @ return */ public static boolean is ( Method method , boolean acceptPrefixless ) { } }
int length = method . getName ( ) . length ( ) ; if ( method . isPrivate ( ) || method . isStatic ( ) ) { return false ; } if ( ! method . getArguments ( ) . isEmpty ( ) ) { return false ; } if ( method . getReturnType ( ) . equals ( VOID ) ) { return false ; } if ( acceptPrefixless ) { return true ; } if ( method . getName ( ) . startsWith ( GET_PREFIX ) ) { return length > GET_PREFIX . length ( ) ; } if ( method . getName ( ) . startsWith ( IS_PREFIX ) && TypeUtils . isBoolean ( method . getReturnType ( ) ) ) { return length > IS_PREFIX . length ( ) ; } if ( method . getName ( ) . startsWith ( SHOULD_PREFIX ) && TypeUtils . isBoolean ( method . getReturnType ( ) ) ) { return length > SHOULD_PREFIX . length ( ) ; } return false ;
public class Yoke { /** * Adds a IMiddleware to the chain . If the middleware is an Error Handler IMiddleware then it is * treated differently and only the last error handler is kept . * You might want to add a middleware that is only supposed to run on a specific route ( path prefix ) . * In this case if the request path does not match the prefix the middleware is skipped automatically . * < pre > * yoke . use ( " / login " , new CustomLoginMiddleware ( ) ) ; * < / pre > * @ param route The route prefix for the middleware * @ param middleware The middleware add to the chain */ public Yoke use ( @ NotNull String route , @ NotNull IMiddleware ... middleware ) { } }
for ( IMiddleware m : middleware ) { if ( m instanceof Middleware ) { // when the type of middleware is error handler then the route is ignored and // the middleware is extracted from the execution chain into a special placeholder // for error handling if ( ( ( Middleware ) m ) . isErrorHandler ( ) ) { errorHandler = m ; } else { MountedMiddleware mm = new MountedMiddleware ( route , m ) ; middlewareList . add ( mm ) ; // register on JMX try { mbs . registerMBean ( new MiddlewareMBean ( mm ) , new ObjectName ( "com.jetdrone.yoke:type=Middleware@" + hashCode ( ) + ",route=" + ObjectName . quote ( route ) + ",name=" + m . getClass ( ) . getSimpleName ( ) + "@" + m . hashCode ( ) ) ) ; } catch ( MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e ) { throw new RuntimeException ( e ) ; } } // initialize the middleware with the current Vert . x and Logger ( ( Middleware ) m ) . init ( this , route ) ; } else { MountedMiddleware mm = new MountedMiddleware ( route , m ) ; middlewareList . add ( mm ) ; // register on JMX try { mbs . registerMBean ( new MiddlewareMBean ( mm ) , new ObjectName ( "com.jetdrone.yoke:type=Middleware@" + hashCode ( ) + ",route=" + ObjectName . quote ( route ) + ",name=" + m . getClass ( ) . getSimpleName ( ) + "@" + m . hashCode ( ) ) ) ; } catch ( MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e ) { throw new RuntimeException ( e ) ; } } } return this ;
public class CodingAnnotationStudy { /** * Creates a new { @ link CodingAnnotationItem } which has been coded with * the given annotation categories . Note that the order of the categories * must correspond to the raters ' indexes . Use null to represent missing * annotations , Invoking < code > addItem ( new Object [ ] { " A " , " B " , null , * " A " } ) < / code > indicates an annotation item which has been coded as * category " A " by rater 0 and 3 and as category " B " by rater 1 . Rater 2 * did not assign any category to the item . * @ throws IllegalArgumentException if the number of annotations does * not match the number of raters . */ public ICodingAnnotationItem addItemAsArray ( final Object [ ] annotations ) { } }
if ( annotations . length != raters . size ( ) ) { throw new IllegalArgumentException ( "Incorrect number of annotation units " + "(expected " + raters . size ( ) + ", given " + annotations . length + "). " + "For array params, use #addItemsAsArray instead of #addItem." ) ; } int itemIdx = items . size ( ) ; CodingAnnotationItem item = new CodingAnnotationItem ( raters . size ( ) ) ; for ( int raterIdx = 0 ; raterIdx < annotations . length ; raterIdx ++ ) { item . addUnit ( createUnit ( itemIdx , raterIdx , annotations [ raterIdx ] ) ) ; } items . add ( item ) ; return item ;
public class JStormUtils { /** * if the list contains repeated string , return the repeated string * this function is used to check whether bolt / spout has a duplicate id */ public static List < String > getRepeat ( List < String > list ) { } }
List < String > rtn = new ArrayList < > ( ) ; Set < String > idSet = new HashSet < > ( ) ; for ( String id : list ) { if ( idSet . contains ( id ) ) { rtn . add ( id ) ; } else { idSet . add ( id ) ; } } return rtn ;
public class ServiceLoader { /** * Creates a new service loader for the given service type and class loader . * @ param service The interface or abstract class representing the service * @ param loader The class loader to be used to load provider - configuration * files and provider classes , or null if the system class loader * ( or , failing that , the bootstrap class loader ) is to be used * @ return A new service loader */ public static < S > ServiceLoader < S > load ( Class < S > service , ClassLoader loader ) { } }
if ( loader == null ) { loader = service . getClassLoader ( ) ; } return new ServiceLoader < S > ( service , new ClassLoaderResourceLoader ( loader ) ) ;
public class PostCompilationAnalysis { /** * Perform post compilation analysis on the given ParsedElement . The other ParsedElements are supporting ones , * e . g . ClassStatements for inner classes */ public static void maybeAnalyze ( IParsedElement pe , IParsedElement ... other ) { } }
if ( ! shouldAnalyze ( ) ) { return ; } if ( ! ( pe instanceof IProgram ) && ( ! ( pe instanceof IClassFileStatement ) || classFileIsNotAnInterface ( ( IClassFileStatement ) pe ) ) ) { // pe . performUnusedElementAnalysis ( other ) ; }
public class AbstractCompositeConstraint { /** * Adds a beta constraint to this multi field OR constraint * @ param constraint */ public void addBetaConstraint ( BetaNodeFieldConstraint constraint ) { } }
if ( constraint != null ) { BetaNodeFieldConstraint [ ] tmp = this . betaConstraints ; this . betaConstraints = new BetaNodeFieldConstraint [ tmp . length + 1 ] ; System . arraycopy ( tmp , 0 , this . betaConstraints , 0 , tmp . length ) ; this . betaConstraints [ this . betaConstraints . length - 1 ] = constraint ; this . updateRequiredDeclarations ( constraint ) ; this . setType ( Constraint . ConstraintType . BETA ) ; }
public class TextWriterIntArray { /** * Serialize an object into the inline section . */ @ Override public void write ( TextWriterStream out , String label , int [ ] v ) { } }
StringBuilder buf = new StringBuilder ( ) ; if ( label != null ) { buf . append ( label ) . append ( '=' ) ; } if ( v != null ) { FormatUtil . formatTo ( buf , v , " " ) ; } out . inlinePrintNoQuotes ( buf . toString ( ) ) ;
public class LocaleHelper { /** * Create a { @ link Locale } from a given { @ link String } representation such as { @ link Locale # toString ( ) } or * { @ link Locale # forLanguageTag ( String ) } . * @ param locale the { @ link String } representation of the { @ link Locale } . * @ return the parsed { @ link Locale } . * @ since 7.3.0 */ public static Locale getLocale ( String locale ) { } }
String languageTag = locale . replace ( SEPARATOR_DEFAULT , SEPARATOR_FOR_LANGUAGE_TAG ) ; if ( ! languageTag . isEmpty ( ) && ( languageTag . charAt ( 0 ) == SEPARATOR_FOR_LANGUAGE_TAG ) ) { languageTag = languageTag . substring ( 1 ) ; // tolerant for accepting suffixes like " _ en _ US " / " - en - US " } return Locale . forLanguageTag ( languageTag ) ;
public class DefaultBeanClassBuilder { /** * Defines the class header for the given class definition */ protected ClassWriter buildClassHeader ( ClassLoader classLoader , ClassDefinition classDef ) { } }
boolean reactive = classDef . isReactive ( ) ; String [ ] original = classDef . getInterfaces ( ) ; int interfacesNr = original . length + ( reactive ? 2 : 1 ) ; String [ ] interfaces = new String [ interfacesNr ] ; for ( int i = 0 ; i < original . length ; i ++ ) { interfaces [ i ] = BuildUtils . getInternalType ( original [ i ] ) ; } interfaces [ original . length ] = BuildUtils . getInternalType ( GeneratedFact . class . getName ( ) ) ; if ( reactive ) { interfaces [ original . length + 1 ] = BuildUtils . getInternalType ( ReactiveObject . class . getName ( ) ) ; } int classModifiers = Opcodes . ACC_PUBLIC + Opcodes . ACC_SUPER ; if ( classDef . isAbstrakt ( ) ) { classModifiers += Opcodes . ACC_ABSTRACT ; } ClassWriter cw = createClassWriter ( classLoader , classModifiers , BuildUtils . getInternalType ( classDef . getClassName ( ) ) , null , BuildUtils . getInternalType ( classDef . getSuperClass ( ) ) , interfaces ) ; buildClassAnnotations ( classDef , cw ) ; cw . visitSource ( classDef . getClassName ( ) + ".java" , null ) ; return cw ;
public class JSONDeserializer { /** * Populate object from json object . * @ param objectInstance * the object instance * @ param objectResolvedType * the object resolved type * @ param jsonVal * the json val * @ param classFieldCache * the class field cache * @ param idToObjectInstance * a map from id to object instance * @ param collectionElementAdders * the collection element adders */ private static void populateObjectFromJsonObject ( final Object objectInstance , final Type objectResolvedType , final Object jsonVal , final ClassFieldCache classFieldCache , final Map < CharSequence , Object > idToObjectInstance , final List < Runnable > collectionElementAdders ) { } }
// Leave objectInstance empty ( or leave fields null ) if jsonVal is null if ( jsonVal == null ) { return ; } // Check jsonVal is JSONObject or JSONArray final boolean isJsonObject = jsonVal instanceof JSONObject ; final boolean isJsonArray = jsonVal instanceof JSONArray ; if ( ! ( isJsonArray || isJsonObject ) ) { throw new IllegalArgumentException ( "Expected JSONObject or JSONArray, got " + jsonVal . getClass ( ) . getSimpleName ( ) ) ; } final JSONObject jsonObject = isJsonObject ? ( JSONObject ) jsonVal : null ; final JSONArray jsonArray = isJsonArray ? ( JSONArray ) jsonVal : null ; // Check concrete type of object instance final Class < ? > rawType = objectInstance . getClass ( ) ; final boolean isMap = Map . class . isAssignableFrom ( rawType ) ; @ SuppressWarnings ( "unchecked" ) final Map < Object , Object > mapInstance = isMap ? ( Map < Object , Object > ) objectInstance : null ; final boolean isCollection = Collection . class . isAssignableFrom ( rawType ) ; @ SuppressWarnings ( "unchecked" ) final Collection < Object > collectionInstance = isCollection ? ( Collection < Object > ) objectInstance : null ; final boolean isArray = rawType . isArray ( ) ; final boolean isObj = ! ( isMap || isCollection || isArray ) ; if ( ( isMap || isObj ) != isJsonObject || ( isCollection || isArray ) != isJsonArray ) { throw new IllegalArgumentException ( "Wrong JSON type for class " + objectInstance . getClass ( ) . getName ( ) ) ; } // Handle concrete subclasses of generic classes , e . g . ClassInfoList extends List < ClassInfo > Type objectResolvedTypeGeneric = objectResolvedType ; if ( objectResolvedType instanceof Class < ? > ) { final Class < ? > objectResolvedCls = ( Class < ? > ) objectResolvedType ; if ( Map . class . isAssignableFrom ( objectResolvedCls ) ) { if ( ! isMap ) { throw new IllegalArgumentException ( "Got an unexpected map type" ) ; } objectResolvedTypeGeneric = objectResolvedCls . getGenericSuperclass ( ) ; } else if ( Collection . class . isAssignableFrom ( objectResolvedCls ) ) { if ( ! isCollection ) { throw new IllegalArgumentException ( "Got an unexpected map type" ) ; } objectResolvedTypeGeneric = objectResolvedCls . getGenericSuperclass ( ) ; } } // Get type arguments of resolved type of object , and resolve any type variables TypeResolutions typeResolutions ; // keyType is the first type parameter for maps , otherwise null Type mapKeyType ; // valueType is the component type for arrays , the second type parameter for maps , // the first type parameter for collections , or null for standard objects ( since // fields may be of a range of different types for standard objects ) Type commonResolvedValueType ; Class < ? > arrayComponentType ; boolean is1DArray ; if ( objectResolvedTypeGeneric instanceof Class < ? > ) { // Not a Map or Collection subclass typeResolutions = null ; mapKeyType = null ; final Class < ? > objectResolvedCls = ( Class < ? > ) objectResolvedTypeGeneric ; arrayComponentType = isArray ? objectResolvedCls . getComponentType ( ) : null ; is1DArray = isArray && ! arrayComponentType . isArray ( ) ; commonResolvedValueType = null ; } else if ( objectResolvedTypeGeneric instanceof ParameterizedType ) { // Get mapping from type variables to resolved types , by comparing the concrete type arguments // of the expected type to its type parameters final ParameterizedType parameterizedResolvedType = ( ParameterizedType ) objectResolvedTypeGeneric ; typeResolutions = new TypeResolutions ( parameterizedResolvedType ) ; // Correlate type variables with resolved types final int numTypeArgs = typeResolutions . resolvedTypeArguments . length ; if ( isMap && numTypeArgs != 2 ) { throw new IllegalArgumentException ( "Wrong number of type arguments for Map: got " + numTypeArgs + "; expected 2" ) ; } else if ( isCollection && numTypeArgs != 1 ) { throw new IllegalArgumentException ( "Wrong number of type arguments for Collection: got " + numTypeArgs + "; expected 1" ) ; } mapKeyType = isMap ? typeResolutions . resolvedTypeArguments [ 0 ] : null ; commonResolvedValueType = isMap ? typeResolutions . resolvedTypeArguments [ 1 ] : isCollection ? typeResolutions . resolvedTypeArguments [ 0 ] : null ; is1DArray = false ; arrayComponentType = null ; } else { throw new IllegalArgumentException ( "Got illegal type: " + objectResolvedTypeGeneric ) ; } final Class < ? > commonValueRawType = commonResolvedValueType == null ? null : JSONUtils . getRawType ( commonResolvedValueType ) ; // For maps and collections , or 1D arrays , all the elements are of the same type . // Look up the constructor for the value type just once for speed . Constructor < ? > commonValueConstructorWithSizeHint ; Constructor < ? > commonValueDefaultConstructor ; if ( isMap || isCollection || ( is1DArray && ! JSONUtils . isBasicValueType ( arrayComponentType ) ) ) { // Get value type constructor for Collection , Map or 1D array commonValueConstructorWithSizeHint = classFieldCache . getConstructorWithSizeHintForConcreteTypeOf ( is1DArray ? arrayComponentType : commonValueRawType ) ; if ( commonValueConstructorWithSizeHint != null ) { // No need for a default constructor if there is a constructor that takes a size hint commonValueDefaultConstructor = null ; } else { commonValueDefaultConstructor = classFieldCache . getDefaultConstructorForConcreteTypeOf ( is1DArray ? arrayComponentType : commonValueRawType ) ; } } else { // There is no single constructor for the fields of objects , and arrays and basic value types // have no constructor commonValueConstructorWithSizeHint = null ; commonValueDefaultConstructor = null ; } // For standard objects , look up the list of deserializable fields final ClassFields classFields = isObj ? classFieldCache . get ( rawType ) : null ; // Need to deserialize items in the same order as serialization : create all deserialized objects // at the current level in Pass 1 , recording any ids that are found , then recurse into child nodes // in Pass 2 after objects at the current level have all been instantiated . ArrayList < ObjectInstantiation > itemsToRecurseToInPass2 = new ArrayList < > ( ) ; // Pass 1 : Convert JSON objects in JSONObject items into Java objects final int numItems = isJsonObject ? jsonObject . items . size ( ) : isJsonArray ? jsonArray . items . size ( ) : /* can ' t happen */ 0 ; for ( int i = 0 ; i < numItems ; i ++ ) { // Iterate through items of JSONObject or JSONArray ( key is null for JSONArray ) final Entry < String , Object > jsonObjectItem = isJsonObject ? jsonObject . items . get ( i ) : null ; final String itemJsonKey ; final Object itemJsonValue ; if ( isJsonObject ) { itemJsonKey = jsonObjectItem . getKey ( ) ; itemJsonValue = jsonObjectItem . getValue ( ) ; } else if ( isJsonArray ) { itemJsonKey = null ; itemJsonValue = jsonArray . items . get ( i ) ; } else { // Can ' t happen ( keep static analyzers happy ) throw ClassGraphException . newClassGraphException ( "This exception should not be thrown" ) ; } final boolean itemJsonValueIsJsonObject = itemJsonValue instanceof JSONObject ; final boolean itemJsonValueIsJsonArray = itemJsonValue instanceof JSONArray ; final JSONObject itemJsonValueJsonObject = itemJsonValueIsJsonObject ? ( JSONObject ) itemJsonValue : null ; final JSONArray itemJsonValueJsonArray = itemJsonValueIsJsonArray ? ( JSONArray ) itemJsonValue : null ; // If this is a standard object , look up the field info in the type cache FieldTypeInfo fieldTypeInfo ; if ( isObj ) { // Standard objects must interpret the key as a string , since field names are strings . // Look up field name directly , using the itemJsonKey string fieldTypeInfo = classFields . fieldNameToFieldTypeInfo . get ( itemJsonKey ) ; if ( fieldTypeInfo == null ) { throw new IllegalArgumentException ( "Field " + rawType . getName ( ) + "." + itemJsonKey + " does not exist or is not accessible, non-final, and non-transient" ) ; } } else { fieldTypeInfo = null ; } // Standard objects have a different type for each field ; arrays have a nested value type ; // collections and maps have a single common value type for all elements . final Type resolvedItemValueType = // For objects , finish resolving partially resolve field types using the set of type // resolutions found by comparing the resolved type of the concrete containing object // with its generic type . ( Fields were partially resolved before by substituting type // arguments of subclasses into type variables of superclasses . ) isObj ? fieldTypeInfo . getFullyResolvedFieldType ( typeResolutions ) // For arrays , the item type is the array component type : isArray ? arrayComponentType // For collections and maps , the value type is the same for all items : commonResolvedValueType ; // Construct an object of the type needed to hold the value final Object instantiatedItemObject ; if ( itemJsonValue == null ) { // If JSON value is null , no need to recurse to deserialize the value instantiatedItemObject = null ; } else if ( resolvedItemValueType == Object . class ) { // For Object - typed fields , we can only deserialize a JSONObject to Map < Object , Object > // or a JSONArray to List < Object > , since we don ' t have any other type information if ( itemJsonValueIsJsonObject ) { instantiatedItemObject = new HashMap < > ( ) ; if ( itemsToRecurseToInPass2 == null ) { itemsToRecurseToInPass2 = new ArrayList < > ( ) ; } itemsToRecurseToInPass2 . add ( new ObjectInstantiation ( instantiatedItemObject , ParameterizedTypeImpl . MAP_OF_UNKNOWN_TYPE , itemJsonValue ) ) ; } else if ( itemJsonValueIsJsonArray ) { instantiatedItemObject = new ArrayList < > ( ) ; if ( itemsToRecurseToInPass2 == null ) { itemsToRecurseToInPass2 = new ArrayList < > ( ) ; } itemsToRecurseToInPass2 . add ( new ObjectInstantiation ( instantiatedItemObject , ParameterizedTypeImpl . LIST_OF_UNKNOWN_TYPE , itemJsonValue ) ) ; } else { // Deserialize basic JSON value for assigning to Object - typed field or as Object - typed element instantiatedItemObject = jsonBasicValueToObject ( itemJsonValue , resolvedItemValueType , /* convertStringToNumber = */ false ) ; } } else if ( JSONUtils . isBasicValueType ( resolvedItemValueType ) ) { // For non - recursive ( basic ) value types , just convert the values directly . if ( itemJsonValueIsJsonObject || itemJsonValueIsJsonArray ) { throw new IllegalArgumentException ( "Got JSONObject or JSONArray type when expecting a simple value type" ) ; } // Deserialize basic JSON value instantiatedItemObject = jsonBasicValueToObject ( itemJsonValue , resolvedItemValueType , /* convertStringToNumber = */ false ) ; } else { // Value type is a recursive type ( has fields or items ) if ( CharSequence . class . isAssignableFrom ( itemJsonValue . getClass ( ) ) ) { // This must be an id ref - - it is a string in a position that requires a recursive type . // Look up JSON reference , based on the id in itemJsonValue . final Object linkedObject = idToObjectInstance . get ( itemJsonValue ) ; if ( linkedObject == null ) { // Since we are deserializing objects in the same order as they were // serialized , this should not happen throw new IllegalArgumentException ( "Object id not found: " + itemJsonValue ) ; } // Use linked value in place of a new object instantiation , but don ' t recurse instantiatedItemObject = linkedObject ; } else { // For other items of recursive type ( maps , collections , or general objects ) , // create an empty object instance for the item if ( ! itemJsonValueIsJsonObject && ! itemJsonValueIsJsonArray ) { throw new IllegalArgumentException ( "Got simple value type when expecting a JSON object or JSON array" ) ; } try { // Call the appropriate constructor for the item , whether its type is array , Collection , // Map or other class type . For collections and Maps , call the size hint constructor // for speed when adding items . final int numSubItems = itemJsonValueIsJsonObject ? itemJsonValueJsonObject . items . size ( ) : itemJsonValueIsJsonArray ? itemJsonValueJsonArray . items . size ( ) : /* can ' t happen */ 0 ; if ( ( resolvedItemValueType instanceof Class < ? > && ( ( Class < ? > ) resolvedItemValueType ) . isArray ( ) ) ) { // Instantiate inner array with same number of items as the inner JSONArray if ( ! itemJsonValueIsJsonArray ) { throw new IllegalArgumentException ( "Expected JSONArray, got " + itemJsonValue . getClass ( ) . getName ( ) ) ; } instantiatedItemObject = Array . newInstance ( ( ( Class < ? > ) resolvedItemValueType ) . getComponentType ( ) , numSubItems ) ; } else { // For maps and collections , all the elements are of the same type if ( isCollection || isMap || is1DArray ) { // Instantiate a Map or Collection , with a size hint if possible instantiatedItemObject = commonValueConstructorWithSizeHint != null // Instantiate collection or map with size hint ? commonValueConstructorWithSizeHint . newInstance ( numSubItems ) // Instantiate other object types : commonValueDefaultConstructor != null ? commonValueDefaultConstructor . newInstance ( ) : /* can ' t happen */ null ; } else if ( isObj ) { // For object types , each field has its own constructor , and the constructor can // vary if the field type is completely generic ( e . g . " T field " ) . final Constructor < ? > valueConstructorWithSizeHint = fieldTypeInfo . getConstructorForFieldTypeWithSizeHint ( resolvedItemValueType , classFieldCache ) ; if ( valueConstructorWithSizeHint != null ) { instantiatedItemObject = valueConstructorWithSizeHint . newInstance ( numSubItems ) ; } else { instantiatedItemObject = fieldTypeInfo . getDefaultConstructorForFieldType ( resolvedItemValueType , classFieldCache ) . newInstance ( ) ; } } else if ( isArray && ! is1DArray ) { // Construct next innermost array for an array of 2 + dimensions instantiatedItemObject = Array . newInstance ( rawType . getComponentType ( ) , numSubItems ) ; } else { throw new IllegalArgumentException ( "Got illegal type" ) ; } } } catch ( final ReflectiveOperationException | SecurityException e ) { throw new IllegalArgumentException ( "Could not instantiate type " + resolvedItemValueType , e ) ; } // Look up any id field in the object ( it will be the first field ) , and if present , // add it to the idToObjectInstance map , so that it is available before recursing // into any sibling objects . if ( itemJsonValue instanceof JSONObject ) { final JSONObject itemJsonObject = ( JSONObject ) itemJsonValue ; if ( itemJsonObject . objectId != null ) { idToObjectInstance . put ( itemJsonObject . objectId , instantiatedItemObject ) ; } } // Defer recursing into items if ( itemsToRecurseToInPass2 == null ) { itemsToRecurseToInPass2 = new ArrayList < > ( ) ; } itemsToRecurseToInPass2 . add ( new ObjectInstantiation ( instantiatedItemObject , resolvedItemValueType , itemJsonValue ) ) ; } } // Add instantiated items to parent object if ( isObj ) { fieldTypeInfo . setFieldValue ( objectInstance , instantiatedItemObject ) ; } else if ( isMap ) { // For maps , key type should be deserialized from strings , to support e . g . Integer as a key type . // This only works for basic object types though ( String , Integer , Enum , etc . ) final Object mapKey = jsonBasicValueToObject ( itemJsonKey , mapKeyType , /* convertStringToNumber = */ true ) ; mapInstance . put ( mapKey , instantiatedItemObject ) ; } else if ( isArray ) { Array . set ( objectInstance , i , instantiatedItemObject ) ; } else if ( isCollection ) { // Can ' t add partially - deserialized item objects to Collections yet , since their // hashCode ( ) and equals ( ) methods may depend upon fields that have not yet been set . collectionElementAdders . add ( new Runnable ( ) { @ Override public void run ( ) { collectionInstance . add ( instantiatedItemObject ) ; } } ) ; } } // Pass 2 : Recurse into child items to populate child fields . if ( itemsToRecurseToInPass2 != null ) { for ( final ObjectInstantiation i : itemsToRecurseToInPass2 ) { populateObjectFromJsonObject ( i . objectInstance , i . type , i . jsonVal , classFieldCache , idToObjectInstance , collectionElementAdders ) ; } }
public class SoapClient { /** * 创建SOAP客户端 * @ param url WS的URL地址 * @ param protocol 协议 , 见 { @ link SoapProtocol } * @ param namespaceURI 方法上的命名空间URI * @ return { @ link SoapClient } * @ since 4.5.6 */ public static SoapClient create ( String url , SoapProtocol protocol , String namespaceURI ) { } }
return new SoapClient ( url , protocol , namespaceURI ) ;
public class JmsConnectionImpl { /** * * * * * * INTERFACE METHODS * * * * * */ @ Override public Session createSession ( boolean transacted , int acknowledgeMode ) throws JMSException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "createSession" , new Object [ ] { transacted , acknowledgeMode } ) ; JmsSessionImpl jmsSession = null ; // throw an exception if the connection is closed . checkClosed ( ) ; // mark that the client id cannot be changed . fixClientID ( ) ; // enforce consistency on transacted flag and acknowledge mode . if ( transacted ) { acknowledgeMode = Session . SESSION_TRANSACTED ; } if ( ! transacted && isManaged ) { if ( acknowledgeMode == Session . CLIENT_ACKNOWLEDGE ) { throw ( JMSException ) JmsErrorUtils . newThrowable ( JMSException . class , "INVALID_ACKNOWLEDGE_MODE_CWSIA0514" , new Object [ ] { acknowledgeMode } , tc ) ; } // if the ackmode is dups _ ok then set it to dups _ ok , for all the other combination // set it to auto _ ack if ( acknowledgeMode == Session . DUPS_OK_ACKNOWLEDGE ) acknowledgeMode = Session . DUPS_OK_ACKNOWLEDGE ; else acknowledgeMode = Session . AUTO_ACKNOWLEDGE ; } // create the jca session . boolean internallyTransacted = ( transacted || acknowledgeMode == Session . CLIENT_ACKNOWLEDGE || acknowledgeMode == Session . DUPS_OK_ACKNOWLEDGE ) ; JmsJcaSession jcaSession = createJcaSession ( internallyTransacted ) ; // Lock here so that another thread using the same connection doesn ' t reset the // coreConnection to something else as as we are instantiating the Session synchronized ( this ) { try { // We potentially have a new coreConnection becuase the creation of the JcaSession // detected that the coreConnection is now invalid and so a new managedConnection // and coreConnection was created . So reset the coreConnection for this JmsConnection // to the new one or we just set the old one again . coreConnection = jcaSession . getSICoreConnection ( ) ; } catch ( IllegalStateException e ) { FFDCFilter . processException ( e , "com.ibm.ws.sib.api.jms.impl.JmsConnectionImpl" , "createSession#2" , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "createSession" , jmsSession ) ; throw ( JMSException ) JmsErrorUtils . newThrowable ( JMSException . class , "JCA_CREATE_SESS_CWSIA0024" , null , e , "JmsConnectionImpl.createSession#2" , this , tc ) ; } // instantiate the jms session , and add it to this connection ' s session list . jmsSession = instantiateSession ( transacted , acknowledgeMode , coreConnection , jcaSession ) ; } synchronized ( stateLock ) { sessions . add ( jmsSession ) ; // d353701 - output a warning message if there are ' lots ' of sessions active . if ( sessions . size ( ) % SESSION_WARNING_THRESHOLD == 0 ) { // We wish to tell the user which line of their application created the session , // so we must obtain a line of stack trace from their application . String errorLocation = JmsErrorUtils . getFirstApplicationStackString ( ) ; SibTr . warning ( tc , "MANY_SESSIONS_WARNING_CWSIA0027" , new Object [ ] { "" + sessions . size ( ) , errorLocation } ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "createSession" , jmsSession ) ; return jmsSession ;
public class Text { /** * Sets the capacity of this Text object to < em > at least < / em > * < code > len < / code > bytes . If the current buffer is longer , * then the capacity and existing content of the buffer are * unchanged . If < code > len < / code > is larger * than the current capacity , the Text object ' s capacity is * increased to match . * @ param len the number of bytes we need * @ param keepData should the old data be kept */ private void setCapacity ( int len , boolean keepData ) { } }
if ( bytes == null || bytes . length < len ) { byte [ ] newBytes = new byte [ len ] ; if ( bytes != null && keepData ) { System . arraycopy ( bytes , 0 , newBytes , 0 , length ) ; } bytes = newBytes ; }
public class StringKit { /** * we can replace * if ( doMethod1 ( ) ! = null ) { * return doMethod1 ( ) * } else { * return doMethod2 ( ) * with * return notBlankElse ( bar : : getName , bar : : getNickName ) * @ param s1 Supplier * @ param s2 Supplier */ public static < T > T noNullElseGet ( @ NonNull Supplier < T > s1 , @ NonNull Supplier < T > s2 ) { } }
T t1 = s1 . get ( ) ; if ( t1 != null ) { return t1 ; } return s2 . get ( ) ;
public class SeGoodsSpecifics { /** * < p > Usually it ' s simple setter for model ID . < / p > * @ param pItsId model ID */ @ Override public final void setItsId ( final SeGoodsSpecificsId pItsId ) { } }
this . itsId = pItsId ; if ( this . itsId != null ) { setSpecifics ( this . itsId . getSpecifics ( ) ) ; setItem ( this . itsId . getItem ( ) ) ; } else { setSpecifics ( null ) ; setItem ( null ) ; }
public class JmsSession { /** * Method that closes the opened Context { @ link # openContext ( ) } , by * committing or rollback it . * @ throws EFapsException on error * @ see # detach ( ) */ public void closeContext ( ) throws EFapsException { } }
if ( isLogedIn ( ) ) { try { if ( ! Context . isTMNoTransaction ( ) ) { if ( Context . isTMActive ( ) ) { Context . commit ( ) ; } else { Context . rollback ( ) ; } } } catch ( final SecurityException e ) { throw new EFapsException ( "SecurityException" , e ) ; } catch ( final IllegalStateException e ) { throw new EFapsException ( "IllegalStateException" , e ) ; } }
public class DestinationManager { /** * This method is used to perform local Destination reconciliation tasks */ public void reconcileLocal ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "reconcileLocal" ) ; // Set reconciling flag to false reconciling = false ; DestinationTypeFilter filter = new DestinationTypeFilter ( ) ; filter . LOCAL = Boolean . TRUE ; filter . UNRECONCILED = Boolean . TRUE ; SIMPIterator itr = destinationIndex . iterator ( filter ) ; while ( itr . hasNext ( ) ) { BaseDestinationHandler dh = ( BaseDestinationHandler ) itr . next ( ) ; try { // Set the deletion flag in the DH persistently . A transaction per DH ? ? LocalTransaction siTran = txManager . createLocalTransaction ( true ) ; dh . setToBeDeleted ( true ) ; // Adjust the destination lookups in Destination Manager destinationIndex . delete ( dh ) ; dh . requestUpdate ( ( Transaction ) siTran ) ; // commit the transaction siTran . commit ( ) ; if ( ! dh . isTemporary ( ) && ! dh . isSystem ( ) ) SibTr . info ( tc , "LOCAL_DEST_DELETE_INFO_CWSIP00217" , new Object [ ] { dh . getName ( ) } ) ; } catch ( MessageStoreException e ) { // No FFDC code needed SibTr . exception ( tc , e ) ; // throw e ; } catch ( SIException e ) { // No FFDC code needed SibTr . exception ( tc , e ) ; // handleRollback ( siTran ) ; // throw e ; } } itr . finished ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "reconcileLocal" ) ;
public class WalkingIterator { /** * Initialize the context values for this expression * after it is cloned . * @ param context The XPath runtime context for this * transformation . */ public void setRoot ( int context , Object environment ) { } }
super . setRoot ( context , environment ) ; if ( null != m_firstWalker ) { m_firstWalker . setRoot ( context ) ; m_lastUsedWalker = m_firstWalker ; }
public class JawnServletContext { /** * IP address of the requesting client . * If the IP of the request seems to come from a local proxy , * then the X - Forwarded - For header is returned . * @ return IP address of the requesting client . */ public String remoteIP ( ) { } }
String remoteAddr = request . getRemoteAddr ( ) ; // This could be a list of proxy IPs , which the developer could // provide via some configuration if ( "127.0.0.1" . equals ( remoteAddr ) ) remoteAddr = requestHeader ( "X-Forwarded-For" ) ; return remoteAddr ;
public class Iterate { /** * Filters a collection into two separate collections based on a predicate returned via a Twin . * Example using a Java 8 lambda expression : * < pre > * Twin & lt ; MutableList & lt ; Person & gt ; & gt ; selectedRejected = * Iterate . < b > selectAndRejectWith < / b > ( people , ( Person person , String lastName ) - > lastName . equals ( person . getLastName ( ) ) , " Mason " ) ; * < / pre > * Example using an anonymous inner class : * < pre > * Twin & lt ; MutableList & lt ; Person & gt ; & gt ; selectedRejected = * Iterate . < b > selectAndRejectWith < / b > ( people , new Predicate2 & lt ; String , String & gt ; ( ) * public boolean accept ( Person person , String lastName ) * return lastName . equals ( person . getLastName ( ) ) ; * } , " Mason " ) ; * < / pre > */ public static < T , IV > Twin < MutableList < T > > selectAndRejectWith ( Iterable < T > iterable , Predicate2 < ? super T , ? super IV > predicate , IV injectedValue ) { } }
if ( iterable instanceof MutableCollection ) { return ( ( MutableCollection < T > ) iterable ) . selectAndRejectWith ( predicate , injectedValue ) ; } if ( iterable instanceof ArrayList ) { return ArrayListIterate . selectAndRejectWith ( ( ArrayList < T > ) iterable , predicate , injectedValue ) ; } if ( iterable instanceof List ) { return ListIterate . selectAndRejectWith ( ( List < T > ) iterable , predicate , injectedValue ) ; } if ( iterable != null ) { return IterableIterate . selectAndRejectWith ( iterable , predicate , injectedValue ) ; } throw new IllegalArgumentException ( "Cannot perform a selectAndRejectWith on null" ) ;
public class CommunicationManager { /** * Piece download completion handler . * When a piece is completed , and valid , we announce to all connected peers * that we now have this piece . * We use this handler to identify when all of the pieces have been * downloaded . When that ' s the case , we can start the seeding period , if * any . * @ param peer The peer we got the piece from . * @ param piece The piece in question . */ @ Override public void handlePieceCompleted ( final SharingPeer peer , final Piece piece ) throws IOException { } }
final SharedTorrent torrent = peer . getTorrent ( ) ; final String torrentHash = torrent . getHexInfoHash ( ) ; try { final Future < ? > validationFuture = myPieceValidatorExecutor . submit ( new Runnable ( ) { @ Override public void run ( ) { validatePieceAsync ( torrent , piece , torrentHash , peer ) ; } } ) ; torrent . markCompletedAndAddValidationFuture ( piece , validationFuture ) ; } catch ( RejectedExecutionException e ) { torrent . markUncompleted ( piece ) ; LoggerUtils . warnWithMessageAndDebugDetails ( logger , "Unable to submit validation task for torrent {}" , torrentHash , e ) ; }
public class Proxy { /** * Specifies the URL to be used for proxy auto - configuration . Expected format is * < code > http : / / hostname . com : 1234 / pacfile < / code > . This is required if { @ link # getProxyType ( ) } is * set to { @ link ProxyType # PAC } , ignored otherwise . * @ param proxyAutoconfigUrl the URL for proxy auto - configuration * @ return reference to self */ public Proxy setProxyAutoconfigUrl ( String proxyAutoconfigUrl ) { } }
verifyProxyTypeCompatibility ( ProxyType . PAC ) ; this . proxyType = ProxyType . PAC ; this . proxyAutoconfigUrl = proxyAutoconfigUrl ; return this ;
public class PersonGroupPersonsImpl { /** * Delete a face from a person . Relative image for the persisted face will also be deleted . * @ param personGroupId Id referencing a particular person group . * @ param personId Id referencing a particular person . * @ param persistedFaceId Id referencing a particular persistedFaceId of an existing face . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws APIErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void deleteFace ( String personGroupId , UUID personId , UUID persistedFaceId ) { } }
deleteFaceWithServiceResponseAsync ( personGroupId , personId , persistedFaceId ) . toBlocking ( ) . single ( ) . body ( ) ;
public class Maps { /** * Returns the value to which the specified key is mapped , or * an empty immutable { @ code List } if this map contains no mapping for the key . * @ param map * @ param key * @ return */ public static < K , E , V extends List < E > > List < E > getOrEmptyList ( final Map < K , V > map , final Object key ) { } }
if ( N . isNullOrEmpty ( map ) ) { return N . < E > emptyList ( ) ; } final V val = map . get ( key ) ; if ( val != null || map . containsKey ( key ) ) { return val ; } else { return N . emptyList ( ) ; }
public class AnimaQuery { /** * generate between statement , simultaneous setting value * @ param columnName column name * @ param a first range value * @ param b second range value * @ return AnimaQuery */ public AnimaQuery < T > between ( String columnName , Object a , Object b ) { } }
conditionSQL . append ( " AND " ) . append ( columnName ) . append ( " BETWEEN ? and ?" ) ; paramValues . add ( a ) ; paramValues . add ( b ) ; return this ;
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getPGPRGRGLength ( ) { } }
if ( pgprgrgLengthEEnum == null ) { pgprgrgLengthEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 124 ) ; } return pgprgrgLengthEEnum ;
public class TemplateBuilderHelper { /** * Registers and configures a { @ link TemplateParser } through a dedicated * builder . * For example : * < pre > * . register ( ThymeleafEmailBuilder . class ) * . detector ( new ThymeleafEngineDetector ( ) ) ; * < / pre > * Your { @ link Builder } may implement { @ link VariantBuilder } to handle * template { @ link Variant } s ( used for { @ link MultiTemplateContent } that * provide a single path to templates with different extensions for * example ) . * Your { @ link Builder } may also implement { @ link DetectorBuilder } in order * to indicate which kind of templates your { @ link TemplateParser } is able * to parse . If your template parse is able to parse any template file you * are using , you may not need to implement { @ link DetectorBuilder } . * In order to be able to keep chaining , you builder instance may provide a * constructor with two arguments : * < ul > * < li > The type of the parent builder ( { @ code & lt ; P & gt ; } ) < / li > * < li > The { @ link EnvironmentBuilder } instance < / li > * < / ul > * If you don ' t care about chaining , just provide a default constructor . * @ param builderClass * the builder class to instantiate * @ param < T > * the type of the builder * @ return the builder to configure the implementation */ @ SuppressWarnings ( "unchecked" ) public < T extends Builder < ? extends TemplateParser > > T register ( Class < T > builderClass ) { } }
// if already registered = > provide same instance for ( Builder < ? extends TemplateParser > builder : templateBuilders ) { if ( builderClass . isAssignableFrom ( builder . getClass ( ) ) ) { return ( T ) builder ; } } // create the builder instance try { T builder ; Constructor < T > constructor = builderClass . getConstructor ( parent . getClass ( ) , EnvironmentBuilder . class ) ; if ( constructor != null ) { builder = constructor . newInstance ( parent , environmentBuilder ) ; } else { builder = builderClass . newInstance ( ) ; } templateBuilders . add ( builder ) ; return builder ; } catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | SecurityException | IllegalArgumentException | InvocationTargetException e ) { throw new BuildException ( "Can't instantiate builder from class " + builderClass . getSimpleName ( ) , e ) ; }
public class FactoryMultiView { /** * Creates a trifocal tensor estimation algorithm . * @ param config configuration for the estimator * @ return Trifocal tensor estimator */ public static Estimate1ofTrifocalTensor trifocal_1 ( @ Nullable ConfigTrifocal config ) { } }
if ( config == null ) { config = new ConfigTrifocal ( ) ; } switch ( config . which ) { case LINEAR_7 : return new WrapTrifocalLinearPoint7 ( ) ; case ALGEBRAIC_7 : ConfigConverge cc = config . converge ; UnconstrainedLeastSquares optimizer = FactoryOptimization . levenbergMarquardt ( null , false ) ; TrifocalAlgebraicPoint7 alg = new TrifocalAlgebraicPoint7 ( optimizer , cc . maxIterations , cc . ftol , cc . gtol ) ; return new WrapTrifocalAlgebraicPoint7 ( alg ) ; } throw new IllegalArgumentException ( "Unknown type " + config . which ) ;
public class BitflyerAdapters { /** * Adapts a list of BitflyerBalance objects to Wallet . * @ param balances Some BitflyerBalances from the API * @ return A Wallet with balances in it */ public static Wallet adaptAccountInfo ( List < BitflyerBalance > balances ) { } }
List < Balance > adaptedBalances = new ArrayList < > ( balances . size ( ) ) ; for ( BitflyerBalance balance : balances ) { adaptedBalances . add ( new Balance ( Currency . getInstance ( balance . getCurrencyCode ( ) ) , balance . getAmount ( ) , balance . getAvailable ( ) ) ) ; } return new Wallet ( adaptedBalances ) ;