signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ExampleHelpers {
    /**
     * Processes all entities in a Wikidata dump using the given entity
     * processor. By default, the most recent JSON dump will be used. In offline
     * mode, only the most recent previously downloaded file is considered.
     *
     * @param entityDocumentProcessor
     *            the object to use for processing entities in this dump
     */
    public static void processEntitiesFromWikidataDump(EntityDocumentProcessor entityDocumentProcessor) {
        // Controller object for processing dumps:
        DumpProcessingController dumpProcessingController = new DumpProcessingController("wikidatawiki");
        dumpProcessingController.setOfflineMode(OFFLINE_MODE);

        // Optional: Use another download directory:
        // dumpProcessingController.setDownloadDirectory(System.getProperty("user.dir"));

        // Should we process historic revisions or only current ones?
        // Only the full-revision modes look at historic revisions; every
        // other mode (including the default) restricts to current revisions.
        boolean onlyCurrentRevisions;
        switch (DUMP_FILE_MODE) {
        case ALL_REVS:
        case ALL_REVS_WITH_DAILIES:
            onlyCurrentRevisions = false;
            break;
        case CURRENT_REVS:
        case CURRENT_REVS_WITH_DAILIES:
        case JSON:
        case JUST_ONE_DAILY_FOR_TEST:
        default:
            onlyCurrentRevisions = true;
        }

        // Subscribe to the most recent entity documents of type wikibase item:
        dumpProcessingController.registerEntityDocumentProcessor(entityDocumentProcessor, null, onlyCurrentRevisions);

        // Also add a timer that reports some basic progress information:
        EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor(TIMEOUT_SEC);
        dumpProcessingController.registerEntityDocumentProcessor(entityTimerProcessor, null, onlyCurrentRevisions);

        MwDumpFile dumpFile = null;
        try {
            // Start processing (may trigger downloads where needed):
            switch (DUMP_FILE_MODE) {
            case ALL_REVS:
            case CURRENT_REVS:
                dumpFile = dumpProcessingController.getMostRecentDump(DumpContentType.FULL);
                break;
            case ALL_REVS_WITH_DAILIES:
            case CURRENT_REVS_WITH_DAILIES:
                // Combined mode: the dump-file "name" is synthesized from both
                // the full dump and the most recent daily increment.
                MwDumpFile fullDumpFile = dumpProcessingController.getMostRecentDump(DumpContentType.FULL);
                MwDumpFile incrDumpFile = dumpProcessingController.getMostRecentDump(DumpContentType.DAILY);
                lastDumpFileName = fullDumpFile.getProjectName() + "-" + incrDumpFile.getDateStamp() + "." + fullDumpFile.getDateStamp();
                dumpProcessingController.processAllRecentRevisionDumps();
                break;
            case JSON:
                dumpFile = dumpProcessingController.getMostRecentDump(DumpContentType.JSON);
                break;
            case JUST_ONE_DAILY_FOR_TEST:
                dumpFile = dumpProcessingController.getMostRecentDump(DumpContentType.DAILY);
                break;
            default:
                throw new RuntimeException("Unsupported dump processing type " + DUMP_FILE_MODE);
            }

            // dumpFile stays null in the *_WITH_DAILIES modes, which already
            // processed their dumps above.
            if (dumpFile != null) {
                lastDumpFileName = dumpFile.getProjectName() + "-" + dumpFile.getDateStamp();
                dumpProcessingController.processDump(dumpFile);
            }
        } catch (TimeoutException e) {
            // The timer caused a time out. Continue and finish normally.
        }

        // Print final timer results:
        entityTimerProcessor.close();
    }
}
public class SystemPublicMetrics { /** * Add JVM non - heap metrics . * @ param result the result */ private static void addNonHeapMetrics ( Collection < Metric < ? > > result ) { } }
MemoryUsage memoryUsage = ManagementFactory . getMemoryMXBean ( ) . getNonHeapMemoryUsage ( ) ; result . add ( newMemoryMetric ( "nonheap.committed" , memoryUsage . getCommitted ( ) ) ) ; result . add ( newMemoryMetric ( "nonheap.init" , memoryUsage . getInit ( ) ) ) ; result . add ( newMemoryMetric ( "nonheap.used" , memoryUsage . getUsed ( ) ) ) ; result . add ( newMemoryMetric ( "nonheap" , memoryUsage . getMax ( ) ) ) ;
public class MainFrame { /** * GEN - LAST : event _ settingsSaveMenuItemActionPerformed */ private void settingsSaveAsMenuItemActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ settingsSaveAsMenuItemActionPerformed JFileChooser fc = new JFileChooser ( ) ; if ( settingsFile != null ) { fc . setSelectedFile ( settingsFile ) ; } else { fc . setSelectedFile ( DEFAULT_SETTINGS_FILE ) ; } int returnVal = fc . showSaveDialog ( this ) ; if ( returnVal == JFileChooser . APPROVE_OPTION ) { settingsFile = fc . getSelectedFile ( ) ; settingsSaveMenuItemActionPerformed ( evt ) ; }
public class AttributesHelper { /** * Binds the < code > attr < / code > statement . */ public static ChainableStatement attr ( String key , String value ) { } }
return new DefaultChainableStatement ( "attr" , JsUtils . quotes ( key ) , JsUtils . quotes ( value ) ) ;
public class ProducerProperties { /** * Sets the administrative override for priority . * @ param pri */ public void setInPriorityOverride ( Integer pri ) { } }
// Check whether changes have been made . if ( ( ( inPriOverride == null ) && ( pri != null ) ) || ( ( inPriOverride != null ) && ( ! inPriOverride . equals ( pri ) ) ) ) { inPriOverride = pri ; recalcOutPriority ( ) ; }
public class AnalyticFormulas { /** * Calculates the Black - Scholes option value of an atm call option . * @ param volatility The Black - Scholes volatility . * @ param optionMaturity The option maturity T . * @ param forward The forward , i . e . , the expectation of the index under the measure associated with payoff unit . * @ param payoffUnit The payoff unit , i . e . , the discount factor or the anuity associated with the payoff . * @ return Returns the value of a European at - the - money call option under the Black - Scholes model */ public static double blackScholesATMOptionValue ( double volatility , double optionMaturity , double forward , double payoffUnit ) { } }
if ( optionMaturity < 0 ) { return 0.0 ; } // Calculate analytic value double dPlus = 0.5 * volatility * Math . sqrt ( optionMaturity ) ; double dMinus = - dPlus ; double valueAnalytic = ( NormalDistribution . cumulativeDistribution ( dPlus ) - NormalDistribution . cumulativeDistribution ( dMinus ) ) * forward * payoffUnit ; return valueAnalytic ;
public class JBBPFieldLong { /** * Get the reversed bit representation of the value . * @ param value the value to be reversed * @ return the reversed value */ public static long reverseBits ( final long value ) { } }
final long b0 = JBBPUtils . reverseBitsInByte ( ( byte ) value ) & 0xFFL ; final long b1 = JBBPUtils . reverseBitsInByte ( ( byte ) ( value >> 8 ) ) & 0xFFL ; final long b2 = JBBPUtils . reverseBitsInByte ( ( byte ) ( value >> 16 ) ) & 0xFFL ; final long b3 = JBBPUtils . reverseBitsInByte ( ( byte ) ( value >> 24 ) ) & 0xFFL ; final long b4 = JBBPUtils . reverseBitsInByte ( ( byte ) ( value >> 32 ) ) & 0xFFL ; final long b5 = JBBPUtils . reverseBitsInByte ( ( byte ) ( value >> 40 ) ) & 0xFFL ; final long b6 = JBBPUtils . reverseBitsInByte ( ( byte ) ( value >> 48 ) ) & 0xFFL ; final long b7 = JBBPUtils . reverseBitsInByte ( ( byte ) ( value >> 56 ) ) & 0xFFL ; return ( b0 << 56 ) | ( b1 << 48 ) | ( b2 << 40 ) | ( b3 << 32 ) | ( b4 << 24 ) | ( b5 << 16 ) | ( b6 << 8 ) | b7 ;
public class ClassPathUtil { /** * Get all class path strings that is file system path , not jar file . * Uncertain is relative or absolute . < br > * @ return class paths that is file system path . */ public static String [ ] getAllClassPathNotInJar ( ) { } }
String [ ] allClassPath = getAllClassPaths ( ) ; List < String > allClassPathNotInJar = new ArrayList < String > ( ) ; for ( String classPath : allClassPath ) { if ( classPath . endsWith ( "jar" ) ) { continue ; } allClassPathNotInJar . add ( classPath ) ; } return allClassPathNotInJar . toArray ( new String [ allClassPathNotInJar . size ( ) ] ) ;
public class ErrorProneFlags { /** * Gets flag value for the given key as a String , wrapped in an { @ link Optional } , which is empty * if the flag is unset . */ public Optional < String > get ( String key ) { } }
return Optional . ofNullable ( flagsMap . get ( key ) ) ;
public class AvatarDataNode { /** * Instantiate a single datanode object . This must be run by invoking * { @ link DataNode # runDatanodeDaemon ( DataNode ) } subsequently . */ public static AvatarDataNode instantiateDataNode ( String args [ ] , Configuration conf ) throws IOException { } }
if ( conf == null ) conf = new Configuration ( ) ; if ( ! parseArguments ( args , conf ) ) { printUsage ( ) ; return null ; } if ( conf . get ( "dfs.network.script" ) != null ) { LOG . error ( "This configuration for rack identification is not supported" + " anymore. RackID resolution is handled by the NameNode." ) ; System . exit ( - 1 ) ; } String [ ] dataDirs = getListOfDataDirs ( conf ) ; return makeInstance ( dataDirs , conf ) ;
public class hqlLexer {
    /**
     * $ANTLR start "MIN"
     * Lexer rule for the 'min' keyword: matches the literal text "min" and
     * emits a token of type MIN on the default channel.
     * NOTE: ANTLR-generated code; do not edit by hand.
     */
    public final void mMIN() throws RecognitionException {
        try {
            int _type = MIN;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // hql.g:46:5: ( 'min' )
            // hql.g:46:7: 'min'
            {
                match("min");
                // Backtracking mode: bail out on a failed match without
                // setting the token type.
                if (state.failed) return;
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class Matrix4x3f { /** * Set the value of the matrix element at column 0 and row 2. * @ param m02 * the new value * @ return this */ public Matrix4x3f m02 ( float m02 ) { } }
this . m02 = m02 ; properties &= ~ PROPERTY_ORTHONORMAL ; if ( m02 != 0.0f ) properties &= ~ ( PROPERTY_IDENTITY | PROPERTY_TRANSLATION ) ; return this ;
public class ContentSpecUtilities { /** * Gets all the fixed urls from a content specification level . * @ param level A level to get the fixed urls for * @ return A set of fixed urls used in the level . */ public static Set < String > getFixedURLs ( final Level level ) { } }
final Set < String > fixedUrls = new HashSet < String > ( ) ; for ( final Node childNode : level . getChildNodes ( ) ) { if ( childNode instanceof SpecNode ) { final SpecNode specNode = ( ( SpecNode ) childNode ) ; if ( ! isNullOrEmpty ( specNode . getFixedUrl ( ) ) ) { fixedUrls . add ( specNode . getFixedUrl ( ) ) ; } } if ( childNode instanceof Level ) { fixedUrls . addAll ( getFixedURLs ( ( Level ) childNode ) ) ; } } return fixedUrls ;
public class SynchroReader {
    /**
     * Extract data for a single task from a Synchro data row and register it
     * with the parent container and the reader's lookup maps.
     *
     * @param parent task parent
     * @param row Synchro task data
     */
    private void processTask(ChildTaskContainer parent, MapRow row) throws IOException {
        Task task = parent.addTask();
        task.setName(row.getString("NAME"));
        task.setGUID(row.getUUID("UUID"));
        task.setText(1, row.getString("ID"));
        task.setDuration(row.getDuration("PLANNED_DURATION"));
        task.setRemainingDuration(row.getDuration("REMAINING_DURATION"));
        task.setHyperlink(row.getString("URL"));
        task.setPercentageComplete(row.getDouble("PERCENT_COMPLETE"));
        task.setNotes(getNotes(row.getRows("COMMENTARY")));
        // A zero-length planned duration marks the task as a milestone.
        task.setMilestone(task.getDuration().getDuration() == 0);

        // Only set an explicit calendar when it differs from the project
        // default (reference comparison is intentional here).
        ProjectCalendar calendar = m_calendarMap.get(row.getUUID("CALENDAR_UUID"));
        if (calendar != m_project.getDefaultCalendar()) {
            task.setCalendar(calendar);
        }

        // Derive start/finish dates according to the task status.
        // NOTE(review): a null STATUS would throw here — presumably the
        // format guarantees the field is present; confirm against the spec.
        switch (row.getInteger("STATUS").intValue()) {
            case 1: // Planned
            {
                task.setStart(row.getDate("PLANNED_START"));
                // Finish is computed from start + duration on the calendar.
                task.setFinish(task.getEffectiveCalendar().getDate(task.getStart(), task.getDuration(), false));
                break;
            }
            case 2: // Started
            {
                task.setActualStart(row.getDate("ACTUAL_START"));
                task.setStart(task.getActualStart());
                // Prefer the estimated finish, falling back to planned finish.
                task.setFinish(row.getDate("ESTIMATED_FINISH"));
                if (task.getFinish() == null) {
                    task.setFinish(row.getDate("PLANNED_FINISH"));
                }
                break;
            }
            case 3: // Finished
            {
                task.setActualStart(row.getDate("ACTUAL_START"));
                task.setActualFinish(row.getDate("ACTUAL_FINISH"));
                task.setPercentageComplete(Double.valueOf(100.0));
                task.setStart(task.getActualStart());
                task.setFinish(task.getActualFinish());
                break;
            }
        }

        setConstraints(task, row);
        processChildTasks(task, row);

        m_taskMap.put(task.getGUID(), task);

        // Predecessors are recorded for later resolution, once all tasks
        // have been read.
        List<MapRow> predecessors = row.getRows("PREDECESSORS");
        if (predecessors != null && !predecessors.isEmpty()) {
            m_predecessorMap.put(task, predecessors);
        }

        List<MapRow> resourceAssignmnets = row.getRows("RESOURCE_ASSIGNMENTS");
        if (resourceAssignmnets != null && !resourceAssignmnets.isEmpty()) {
            processResourceAssignments(task, resourceAssignmnets);
        }
    }
}
public class DescribeDenseHogFastAlg { /** * Compute the descriptor from the specified cells . ( row , col ) to ( row + w , col + w ) * @ param row Lower extent of cell rows * @ param col Lower extent of cell columns */ void computeDescriptor ( int row , int col ) { } }
// set location to top - left pixel locations . grow ( ) . set ( col * pixelsPerCell , row * pixelsPerCell ) ; TupleDesc_F64 d = descriptions . grow ( ) ; int indexDesc = 0 ; for ( int i = 0 ; i < cellsPerBlockY ; i ++ ) { for ( int j = 0 ; j < cellsPerBlockX ; j ++ ) { Cell c = cells [ ( row + i ) * cellCols + ( col + j ) ] ; for ( int k = 0 ; k < c . histogram . length ; k ++ ) { d . value [ indexDesc ++ ] = c . histogram [ k ] ; } } } // Apply SIFT style L2 - Hys normalization DescribeSiftCommon . normalizeDescriptor ( d , 0.2 ) ;
public class RevocationStatus { /** * Mark the owning item as revoked . * @ param sRevocationUserID * The ID of the user who revoked it . May neither be < code > null < / code > * nor empty . * @ param aRevocationDT * The date and time when the revocation took place . May not be * < code > null < / code > . * @ param sRevocationReason * A human readable reason why revocation took place . May neither be * < code > null < / code > nor empty . * @ throws IllegalStateException * If this status already denotes a revoked object . */ public void markRevoked ( @ Nonnull @ Nonempty final String sRevocationUserID , @ Nonnull final LocalDateTime aRevocationDT , @ Nonnull @ Nonempty final String sRevocationReason ) { } }
ValueEnforcer . notEmpty ( sRevocationUserID , "RevocationUserID" ) ; ValueEnforcer . notNull ( aRevocationDT , "RevocationDT" ) ; ValueEnforcer . notEmpty ( sRevocationReason , "RevocationReason" ) ; if ( m_bRevoked ) throw new IllegalStateException ( "This object is already revoked!" ) ; m_bRevoked = true ; m_sRevocationUserID = sRevocationUserID ; m_aRevocationDT = aRevocationDT ; m_sRevocationReason = sRevocationReason ;
public class MergingReader { /** * It is possible that a reducer does not iterate over all of the items given to it for a given * key . However on the next callback they expect to receive items for the following key . this * method skips over all the left over items from the previous key they did not read . */ private void skipLeftoverItems ( ) { } }
if ( lastKey == null || lowestReaderQueue . isEmpty ( ) ) { return ; } boolean itemSkiped ; do { PeekingInputReader < KeyValue < ByteBuffer , ? extends Iterable < V > > > reader = lowestReaderQueue . remove ( ) ; itemSkiped = skipItemsOnReader ( reader ) ; addReaderToQueueIfNotEmpty ( reader ) ; } while ( itemSkiped ) ;
public class SpectralClustering {
    /**
     * Returns a {@link ClusterResult} when {@code matrix} is spectrally
     * clustered at a given {@code depth}. This will recursively call itself
     * until the number of rows in {@code matrix} is less than or equal to 1.
     */
    private ClusterResult fullCluster(Matrix matrix, final int depth) {
        verbose("Clustering at depth " + depth);

        // If the matrix has only one element or the depth is equal to the
        // maximum number of desired clusters then all items are in a single
        // cluster.
        if (matrix.rows() <= 1)
            return new ClusterResult(new int[matrix.rows()], 1);

        // Get a fresh new eigen cutter and compute the spectral cut of the
        // matrix.
        EigenCut eigenCutter = cutterGenerator.generate();
        eigenCutter.computeCut(matrix);

        final Matrix leftMatrix = eigenCutter.getLeftCut();
        final Matrix rightMatrix = eigenCutter.getRightCut();
        verbose(String.format("Splitting into two matricies %d-%d", leftMatrix.rows(), rightMatrix.rows()));

        // If the cut decided that the matrix should not be split (one side
        // kept every row), short circuit any attempts to further cut it.
        if (leftMatrix.rows() == matrix.rows() || rightMatrix.rows() == matrix.rows())
            return new ClusterResult(new int[matrix.rows()], 1);

        // Do clustering on the left and right branches. Each recursive call
        // uses a new EigenCutter instance, which holds all of the state for
        // a particular partition.
        final ClusterResult[] results = new ClusterResult[2];
        results[0] = fullCluster(leftMatrix, depth + 1);
        results[1] = fullCluster(rightMatrix, depth + 1);

        ClusterResult leftResult = results[0];
        ClusterResult rightResult = results[1];

        verbose("Merging at depth " + depth);

        // Compute the relaxed correlation objective function over the split
        // partitions found so far.
        double splitObjective = eigenCutter.getSplitObjective(alpha, beta, leftResult.numClusters, leftResult.assignments, rightResult.numClusters, rightResult.assignments);

        // Compute the objective when we merge the two branches together.
        double mergedObjective = eigenCutter.getMergedObjective(alpha, beta);

        // If the merged objective value is less than the split version, combine
        // all clusters into one.
        int[] assignments = new int[matrix.rows()];
        int numClusters = 1;
        if (mergedObjective < splitObjective) {
            verbose("Selecting to combine sub trees at depth " + depth);
            Arrays.fill(assignments, 0);
        } else {
            verbose("Selecting to maintain sub trees at depth " + depth);
            // Copy over the left assignments and the right assignments, where
            // the cluster id's of the right assignments are incremented to
            // avoid duplicate cluster ids.
            numClusters = leftResult.numClusters + rightResult.numClusters;
            int[] leftReordering = eigenCutter.getLeftReordering();
            int[] rightReordering = eigenCutter.getRightReordering();
            for (int index = 0; index < leftReordering.length; ++index)
                assignments[leftReordering[index]] = leftResult.assignments[index];
            for (int index = 0; index < rightReordering.length; ++index)
                assignments[rightReordering[index]] = rightResult.assignments[index] + leftResult.numClusters;
        }
        return new ClusterResult(assignments, numClusters);
    }
}
public class HierarchicalConfigurationUtils { /** * 从多个配置对象中找出符合给定名字的配置对象 。 * @ param config * 配置对象 * @ param subKey * 子项配置键值 * @ param nameKey * 名字配置键值 * @ param nameValue * 属性值 * @ return 匹配的配置对象 * @ throws ConfigurationException * 找不到指定配置 */ public static HierarchicalConfiguration findForName ( final HierarchicalConfiguration config , final String subKey , final String nameKey , final String nameValue ) throws ConfigurationException { } }
for ( HierarchicalConfiguration subConfig : config . configurationsAt ( subKey ) ) { String name = subConfig . getString ( nameKey ) ; if ( name . equals ( nameValue ) ) { return subConfig ; } } // 没有找到 throw new ConfigurationException ( MessageFormats . format ( ResourceBundles . getBundle ( RESOURCE_BASENAME ) , "subnodeNotFound" , nameKey , nameValue , subKey ) ) ;
public class PrecedingAxis { /** * { @ inheritDoc } */ @ Override public final void reset ( final long mNodeKey ) { } }
super . reset ( mNodeKey ) ; mIsFirst = true ; mStack = new Stack < Long > ( ) ;
public class OsmdroidShapeMarkers {
    /**
     * Polyline: add a marker into the list of markers at the position where
     * it is closest to the surrounding points.
     *
     * @param marker the marker to insert
     * @param markers the polyline's existing markers, in path order
     */
    public static void addMarkerAsPolyline(Marker marker, List<Marker> markers) {
        GeoPoint position = marker.getPosition();
        // Default: append at the end (covers empty and single-element lists).
        int insertLocation = markers.size();
        if (markers.size() > 1) {
            // Find the existing marker nearest to the new position.
            double[] distances = new double[markers.size()];
            insertLocation = 0;
            distances[0] = SphericalUtil.computeDistanceBetween(position, markers.get(0).getPosition());
            for (int i = 1; i < markers.size(); i++) {
                distances[i] = SphericalUtil.computeDistanceBetween(position, markers.get(i).getPosition());
                if (distances[i] < distances[insertLocation]) {
                    insertLocation = i;
                }
            }
            // Decide whether to insert before or after the nearest marker by
            // looking at its neighbours (null means no such neighbour).
            Integer beforeLocation = insertLocation > 0 ? insertLocation - 1 : null;
            Integer afterLocation = insertLocation < distances.length - 1 ? insertLocation + 1 : null;
            if (beforeLocation != null && afterLocation != null) {
                // Interior nearest marker: move towards the closer neighbour.
                if (distances[beforeLocation] > distances[afterLocation]) {
                    insertLocation = afterLocation;
                }
            } else if (beforeLocation != null) {
                // Nearest is the last marker: append after it when the new
                // point is at least as far from the previous marker as the
                // final segment is long.
                if (distances[beforeLocation] >= SphericalUtil.computeDistanceBetween(markers.get(beforeLocation).getPosition(), markers.get(insertLocation).getPosition())) {
                    insertLocation++;
                }
            } else {
                // Nearest is the first marker: insert after it when the new
                // point is closer to the second marker than the first segment
                // is long.
                if (distances[afterLocation] < SphericalUtil.computeDistanceBetween(markers.get(afterLocation).getPosition(), markers.get(insertLocation).getPosition())) {
                    insertLocation++;
                }
            }
        }
        markers.add(insertLocation, marker);
    }
}
public class DateTimeFormatter { /** * Prints a ReadablePartial . * Neither the override chronology nor the override zone are used * by this method . * @ param appendable the destination to format to , not null * @ param partial partial to format * @ since 2.0 */ public void printTo ( Appendable appendable , ReadablePartial partial ) throws IOException { } }
InternalPrinter printer = requirePrinter ( ) ; if ( partial == null ) { throw new IllegalArgumentException ( "The partial must not be null" ) ; } printer . printTo ( appendable , partial , iLocale ) ;
public class BodyContentImpl {
    /**
     * Write a portion of a String.
     *
     * @param s String to be written
     * @param off Offset from which to start reading characters
     * @param len Number of characters to be written
     */
    public void write(String s, int off, int len) throws IOException {
        if (writer != null) {
            // Content is being passed through to an enclosing writer.
            writer.write(s, off, len);
        } else {
            ensureOpen(); // PK33136
            // Buffer the requested substring locally and advance the write
            // position accordingly.
            strBuffer.append(s.substring(off, off + len));
            nextChar += len; // PK33136
        }
    }
}
public class StitchAuthImpl {
    /**
     * Adds a listener for any important auth event.
     *
     * @param listener the listener to register
     * @see StitchAuthListener
     */
    public void addAuthListener(final StitchAuthListener listener) {
        listeners.put(listener, Boolean.TRUE);

        // Trigger the onUserLoggedIn event in case some event happens and
        // this caller would miss out on this event otherwise.
        onAuthEvent(listener);

        // Notify the listener asynchronously that it has been registered.
        dispatcher.dispatchTask(new Callable<Void>() {
            @Override
            public Void call() {
                listener.onListenerRegistered(StitchAuthImpl.this);
                return null;
            }
        });
    }
}
public class ModelItem { /** * Adds a perspective to this model item . * @ param name the name of the perspective ( e . g . " Security " , must be unique ) * @ param description a description of the perspective * @ return a Perspective object * @ throws IllegalArgumentException if perspective details are not specified , or the named perspective exists already */ public Perspective addPerspective ( String name , String description ) { } }
if ( StringUtils . isNullOrEmpty ( name ) ) { throw new IllegalArgumentException ( "A name must be specified." ) ; } if ( StringUtils . isNullOrEmpty ( description ) ) { throw new IllegalArgumentException ( "A description must be specified." ) ; } if ( perspectives . stream ( ) . filter ( p -> p . getName ( ) . equals ( name ) ) . count ( ) > 0 ) { throw new IllegalArgumentException ( "A perspective named \"" + name + "\" already exists." ) ; } Perspective perspective = new Perspective ( name , description ) ; perspectives . add ( perspective ) ; return perspective ;
public class ServiceDirectoryConfig { /** * Get the property object as Boolean , or return defaultVal if property is not defined . * @ param name * property name . * @ param defaultVal * default property value . * @ return * property value as boolean , return defaultVal if property is undefined . */ public boolean getBoolean ( String name , boolean defaultVal ) { } }
if ( this . configuration . containsKey ( name ) ) { return this . configuration . getBoolean ( name ) ; } else { return defaultVal ; }
public class BackendServiceClient { /** * Returns the specified BackendService resource . Gets a list of available backend services . * < p > Sample code : * < pre > < code > * try ( BackendServiceClient backendServiceClient = BackendServiceClient . create ( ) ) { * ProjectGlobalBackendServiceName backendService = ProjectGlobalBackendServiceName . of ( " [ PROJECT ] " , " [ BACKEND _ SERVICE ] " ) ; * BackendService response = backendServiceClient . getBackendService ( backendService ) ; * < / code > < / pre > * @ param backendService Name of the BackendService resource to return . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final BackendService getBackendService ( ProjectGlobalBackendServiceName backendService ) { } }
GetBackendServiceHttpRequest request = GetBackendServiceHttpRequest . newBuilder ( ) . setBackendService ( backendService == null ? null : backendService . toString ( ) ) . build ( ) ; return getBackendService ( request ) ;
public class TwilioFactorProvider {
    /**
     * Setter for the Twilio "from" number.
     *
     * @param from the from number to set
     * @throws IllegalArgumentException when both {@code from} and {@code messagingServiceSID} are set
     * @deprecated use the constructor instead
     */
    @Deprecated
    @JsonProperty("from")
    public void setFrom(String from) throws IllegalArgumentException {
        // "from" and "messagingServiceSID" are mutually exclusive.
        if (messagingServiceSID != null) {
            throw new IllegalArgumentException("You must specify either `from` or `messagingServiceSID`, but not both");
        }
        this.from = from;
    }
}
public class SimpleMapSerializer { /** * 写一个String * @ param out 输出流 * @ param str 字符串 * @ throws IOException 写入异常 */ protected void writeString ( OutputStream out , String str ) throws IOException { } }
if ( str == null ) { writeInt ( out , - 1 ) ; } else if ( str . isEmpty ( ) ) { writeInt ( out , 0 ) ; } else { byte [ ] bs = StringSerializer . encode ( str ) ; writeInt ( out , bs . length ) ; out . write ( bs ) ; }
public class HtmlPolicyBuilder { /** * Reverses a decision made by { @ link # allowUrlProtocols } . */ public HtmlPolicyBuilder disallowUrlProtocols ( String ... protocols ) { } }
invalidateCompiledState ( ) ; for ( String protocol : protocols ) { protocol = Strings . toLowerCase ( protocol ) ; allowedProtocols . remove ( protocol ) ; } return this ;
public class KAFDocument { /** * Creates a new opinion object . It assigns an appropriate ID to it . The opinion is added to the document . * @ return a new opinion . */ public Opinion newOpinion ( ) { } }
String newId = idManager . getNextId ( AnnotationType . OPINION ) ; Opinion newOpinion = new Opinion ( newId ) ; annotationContainer . add ( newOpinion , Layer . OPINIONS , AnnotationType . OPINION ) ; return newOpinion ;
public class SymmetryTools {
    /**
     * Given a symmetry result, it calculates the overall global symmetry,
     * factoring out the alignment and detection steps of the
     * {@link QuatSymmetryDetector} algorithm.
     *
     * @param result symmetry result
     * @return global symmetry results
     * @throws StructureException if the repeat alignment cannot be built
     */
    public static QuatSymmetryResults getQuaternarySymmetry(CeSymmResult result) throws StructureException {
        // Obtain the subunits of the repeats
        List<Atom[]> atoms = toRepeatsAlignment(result).getAtomArrays();
        List<Subunit> subunits = atoms.stream().map(a -> new Subunit(a, null, null, null)).collect(Collectors.toList());
        // The clustering thresholds are deliberately permissive (RMSD 10 A,
        // coverage 0) so that all repeats are always merged into one cluster.
        SubunitClustererParameters cp = new SubunitClustererParameters();
        cp.setClustererMethod(SubunitClustererMethod.STRUCTURE);
        cp.setRMSDThreshold(10.0);
        cp.setStructureCoverageThreshold(0.0);
        QuatSymmetryParameters sp = new QuatSymmetryParameters();
        QuatSymmetryResults gSymmetry = QuatSymmetryDetector.calcGlobalSymmetry(subunits, sp, cp);
        return gSymmetry;
    }
}
public class MetricInstrumentedIterator {
    /**
     * If the iterator argument is non-null, then return a new
     * {@code MetricInstrumentedIterator} wrapping it. Metrics for method
     * calls on the wrapped instance will be prefixed with the string
     * {@code prefix}, which must be non-null. If the iterator argument is
     * null, then return null.
     *
     * @param keyIterator the iterator to wrap with Metrics measurements
     * @param prefix the Metrics name prefix string
     * @return a wrapper around {@code keyIterator} or null if {@code keyIterator} is null
     */
    public static MetricInstrumentedIterator of(KeyIterator keyIterator, String... prefix) {
        // Null propagates: no iterator, no wrapper.
        if (keyIterator == null) {
            return null;
        }
        // prefix elements are joined with '.' into the metric name prefix.
        Preconditions.checkNotNull(prefix);
        return new MetricInstrumentedIterator(keyIterator, StringUtils.join(prefix, "."));
    }
}
public class ExecutorServiceFactories { /** * Returns a { @ link ExecutorServiceFactory } which creates threadpool executors with the given base * name and number of threads . Created threads will be daemonic . * @ param name the base name for executor thread names * @ param nThreads the number of threads to create executors with * @ return the { @ link ExecutorServiceFactory } */ public static ExecutorServiceFactory fixedThreadPool ( String name , int nThreads ) { } }
return ( ) -> Executors . newFixedThreadPool ( nThreads , ThreadFactoryUtils . build ( name + "-%d" , true ) ) ;
public class Utils4Swing { /** * Hides the glass pane and restores the saved state . * @ param state * State to restore - Cannot be < code > null < / code > . */ public static void hideGlassPane ( final GlassPaneState state ) { } }
Utils4J . checkNotNull ( "state" , state ) ; final Component glassPane = state . getGlassPane ( ) ; glassPane . removeMouseListener ( state . getMouseListener ( ) ) ; glassPane . setCursor ( state . getCursor ( ) ) ; glassPane . setVisible ( false ) ; if ( state . getFocusOwner ( ) != null ) { state . getFocusOwner ( ) . requestFocus ( ) ; }
public class JCRAssert { /** * Asserts that a specific node with the given absolute path does not exist in the session * @ param session * the session to search for the node * @ param absPath * the absolute path to look for a node * @ throws RepositoryException * if the repository access failed */ public static void assertNodeNotExistByPath ( final Session session , final String absPath ) throws RepositoryException { } }
try { session . getNode ( absPath ) ; fail ( "Node " + absPath + " does not exist" ) ; } catch ( final PathNotFoundException e ) { // NOSONAR // the exception is expected }
public class inat { /** * Use this API to fetch filtered set of inat resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static inat [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
inat obj = new inat ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; inat [ ] response = ( inat [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class BiddingStrategyConfiguration { /** * Gets the biddingScheme value for this BiddingStrategyConfiguration . * @ return biddingScheme * The bidding strategy metadata . Bidding strategy can be associated * using the { @ linkplain * BiddingStrategyConfiguration # biddingStrategyType } * or the bidding scheme . * < p > For details on portfolio vs . standard availability , * see the < a * href = " https : / / developers . google . com / adwords / api / docs / guides / bidding " > bidding * guide < / a > . * < p > Starting with v201705 , this field cannot be set * at the ad group or ad group criterion level . */ public com . google . api . ads . adwords . axis . v201809 . cm . BiddingScheme getBiddingScheme ( ) { } }
return biddingScheme ;
public class SyncAgentsInner { /** * Lists databases linked to a sync agent . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; SyncAgentLinkedDatabaseInner & gt ; object */ public Observable < ServiceResponse < Page < SyncAgentLinkedDatabaseInner > > > listLinkedDatabasesNextWithServiceResponseAsync ( final String nextPageLink ) { } }
return listLinkedDatabasesNextSinglePageAsync ( nextPageLink ) . concatMap ( new Func1 < ServiceResponse < Page < SyncAgentLinkedDatabaseInner > > , Observable < ServiceResponse < Page < SyncAgentLinkedDatabaseInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < SyncAgentLinkedDatabaseInner > > > call ( ServiceResponse < Page < SyncAgentLinkedDatabaseInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listLinkedDatabasesNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ;
public class Base64 { /** * Encodes a byte array into Base64 notation . * @ param source The data to convert * @ param off Offset in array where conversion should begin * @ param len Length of data to convert * @ since 1.4 */ public static String encodeBytes ( byte [ ] source , int off , int len ) { } }
return encodeBytes ( source , off , len , true ) ;
public class GroovyResultSetExtension { /** * Gets the value of the designated column in the current row * of as an < code > Object < / code > . * @ param columnName the SQL name of the column * @ return the returned column value * @ throws MissingPropertyException if an SQLException happens while getting the object * @ see groovy . lang . GroovyObject # getProperty ( java . lang . String ) * @ see ResultSet # getObject ( java . lang . String ) */ public Object getProperty ( String columnName ) { } }
try { return getResultSet ( ) . getObject ( columnName ) ; } catch ( SQLException e ) { throw new MissingPropertyException ( columnName , GroovyResultSetProxy . class , e ) ; }
// NOTE(review): iterative StAX-to-DOM builder. `current` tracks the DOM insertion point:
// START_ELEMENT pushes (current = newElem), END_ELEMENT pops via getParentNode(). The
// SPACE case deliberately falls through to CHARACTERS when whitespace is kept; exact
// case order and the labeled continue/break control flow are behavior-critical, so the
// code is left untouched below.
public class Stax2DomBuilder { /** * This method takes a < code > XMLStreamReader < / code > and builds up a JDOM tree . Recursion has been eliminated by using nodes ' parent / child relationship ; this improves performance somewhat ( classic recursion - by - iteration - and - explicit stack transformation ) * @ param r * Stream reader to use for reading the document from which to build the tree * @ param doc * JDOM < code > Document < / code > being built . * @ throws XMLStreamException * for fun */ protected void buildTree ( XMLStreamReader r , Document doc ) throws XMLStreamException { } }
checkReaderSettings ( r ) ; Node current = doc ; // At top level main_loop : while ( true ) { int evtType = r . next ( ) ; Node child ; switch ( evtType ) { case XMLStreamConstants . CDATA : child = doc . createCDATASection ( r . getText ( ) ) ; break ; case XMLStreamConstants . SPACE : if ( mCfgIgnoreWs ) { continue main_loop ; } /* * Oh great . DOM is brain - dead in that ignorable white space * can not be added , even though it is legal , and often * reported by StAX / SAX impls . . . */ if ( current == doc ) { // better just ignore , thus . . . continue ; } // fall through case XMLStreamConstants . CHARACTERS : child = doc . createTextNode ( r . getText ( ) ) ; break ; case XMLStreamConstants . COMMENT : child = doc . createComment ( r . getText ( ) ) ; break ; case XMLStreamConstants . END_DOCUMENT : break main_loop ; case XMLStreamConstants . END_ELEMENT : current = current . getParentNode ( ) ; if ( current == null ) { current = doc ; } continue main_loop ; case XMLStreamConstants . ENTITY_DECLARATION : case XMLStreamConstants . NOTATION_DECLARATION : /* * Shouldn ' t really get these , but maybe some stream readers * do provide the info . If so , better ignore it - - DTD event * should have most / all we need . */ continue main_loop ; case XMLStreamConstants . ENTITY_REFERENCE : child = doc . createEntityReference ( r . getLocalName ( ) ) ; break ; case XMLStreamConstants . PROCESSING_INSTRUCTION : child = doc . createProcessingInstruction ( r . getPITarget ( ) , r . getPIData ( ) ) ; break ; case XMLStreamConstants . START_ELEMENT : // Ok , need to add a new element . . . { String ln = r . getLocalName ( ) ; Element newElem ; if ( mNsAware ) { String elemPrefix = r . getPrefix ( ) ; // Doh , DOM requires a silly qualified name . . . if ( elemPrefix != null && elemPrefix . length ( ) > 0 ) { newElem = doc . createElementNS ( r . getNamespaceURI ( ) , getQualified ( elemPrefix , ln ) ) ; } else { newElem = doc . createElementNS ( r . 
// NOTE(review): attribute handling mirrors the element handling above — NS-aware
// attributes get a qualified name; otherwise plain createAttribute is used.
getNamespaceURI ( ) , ln ) ; } } else { // if non - ns - aware , things are simpler : newElem = doc . createElement ( ln ) ; } /* * No need to check namespace bindings , unlikes with some * other frameworks ( JDOM ) */ // And then the attributes : for ( int i = 0 , len = r . getAttributeCount ( ) ; i < len ; ++ i ) { ln = r . getAttributeLocalName ( i ) ; if ( mNsAware ) { String prefix = r . getAttributePrefix ( i ) ; if ( prefix != null && prefix . length ( ) > 0 ) { ln = getQualified ( prefix , ln ) ; } Attr attr = doc . createAttributeNS ( r . getAttributeNamespace ( i ) , ln ) ; attr . setValue ( r . getAttributeValue ( i ) ) ; newElem . setAttributeNodeNS ( attr ) ; } else { Attr attr = doc . createAttribute ( ln ) ; attr . setValue ( r . getAttributeValue ( i ) ) ; newElem . setAttributeNode ( attr ) ; } } // And then ' push ' new element . . . current . appendChild ( newElem ) ; current = newElem ; continue main_loop ; } case XMLStreamConstants . START_DOCUMENT : /* * This should only be received at the beginning of * document . . . so , should we indicate the problem or not ? */ /* * For now , let it pass : maybe some ( broken ) readers pass * that info as first event in beginning of doc ? */ continue main_loop ; case XMLStreamConstants . DTD : /* * ! ! ! Note : StAX does not expose enough information about * doctype declaration ( specifically , public and system * id ! ) ; ( altough StAX2 would . . . ) * Worse , DOM1/2 do not specify a way to create the DocType * node , even if StAX provided it . This is pretty silly , all * in all . */ continue main_loop ; // Should never get these , from a stream reader : /* * ( commented out entries are just FYI ; default catches them * all ) */ // case XMLStreamConstants . ATTRIBUTE : // case XMLStreamConstants . NAMESPACE : default : throw new XMLStreamException ( "Unrecognized iterator event type: " + r . 
getEventType ( ) + "; should not receive such types (broken stream reader?)" ) ; } if ( child != null ) { current . appendChild ( child ) ; } }
public class OAuth2Utils { /** * Extract a map from a query string . * @ param query a query ( or fragment ) string from a URI * @ return a Map of the values in the query */ public static Map < String , String > extractMap ( String query ) { } }
Map < String , String > map = new HashMap < String , String > ( ) ; Properties properties = StringUtils . splitArrayElementsIntoProperties ( StringUtils . delimitedListToStringArray ( query , "&" ) , "=" ) ; if ( properties != null ) { for ( Object key : properties . keySet ( ) ) { map . put ( key . toString ( ) , properties . get ( key ) . toString ( ) ) ; } } return map ;
public class InfixOpNode { /** * Implements the ternary logic AND operation */ public static Object and ( Object left , Object right , EvaluationContext ctx ) { } }
Boolean l = EvalHelper . getBooleanOrNull ( left ) ; Boolean r = EvalHelper . getBooleanOrNull ( right ) ; // have to check for all nulls first to avoid NPE if ( ( l == null && r == null ) || ( l == null && r == true ) || ( r == null && l == true ) ) { return null ; } else if ( l == null || r == null ) { return false ; } return l && r ;
public class QueryConverter { /** * Retrieve ( in string format ) from this field . * @ return The display field of the grid record . */ public String getString ( ) { } }
FieldInfo field = this . getField ( ) ; if ( field instanceof StringField ) return field . toString ( ) ; if ( displayFieldName != null ) return m_record . getField ( displayFieldName ) . getString ( ) ; // Return the desc string else return m_record . getField ( m_iFieldSeq ) . getString ( ) ; // Return the desc string
public class CommerceWarehouseModelImpl { /** * Converts the soap model instance into a normal model instance . * @ param soapModel the soap model instance to convert * @ return the normal model instance */ public static CommerceWarehouse toModel ( CommerceWarehouseSoap soapModel ) { } }
if ( soapModel == null ) { return null ; } CommerceWarehouse model = new CommerceWarehouseImpl ( ) ; model . setCommerceWarehouseId ( soapModel . getCommerceWarehouseId ( ) ) ; model . setGroupId ( soapModel . getGroupId ( ) ) ; model . setCompanyId ( soapModel . getCompanyId ( ) ) ; model . setUserId ( soapModel . getUserId ( ) ) ; model . setUserName ( soapModel . getUserName ( ) ) ; model . setCreateDate ( soapModel . getCreateDate ( ) ) ; model . setModifiedDate ( soapModel . getModifiedDate ( ) ) ; model . setName ( soapModel . getName ( ) ) ; model . setDescription ( soapModel . getDescription ( ) ) ; model . setActive ( soapModel . isActive ( ) ) ; model . setStreet1 ( soapModel . getStreet1 ( ) ) ; model . setStreet2 ( soapModel . getStreet2 ( ) ) ; model . setStreet3 ( soapModel . getStreet3 ( ) ) ; model . setCity ( soapModel . getCity ( ) ) ; model . setZip ( soapModel . getZip ( ) ) ; model . setCommerceRegionId ( soapModel . getCommerceRegionId ( ) ) ; model . setCommerceCountryId ( soapModel . getCommerceCountryId ( ) ) ; model . setLatitude ( soapModel . getLatitude ( ) ) ; model . setLongitude ( soapModel . getLongitude ( ) ) ; model . setPrimary ( soapModel . isPrimary ( ) ) ; return model ;
public class GDiscreteFourierTransformOps { /** * Computes the phase of the complex image : < br > * phase = atan2 ( imaginary , real ) * @ param transform ( Input ) Complex interleaved image * @ param phase ( output ) Phase of image */ public static void phase ( ImageInterleaved transform , GrayF phase ) { } }
if ( transform instanceof InterleavedF32 ) { DiscreteFourierTransformOps . phase ( ( InterleavedF32 ) transform , ( GrayF32 ) phase ) ; } else if ( transform instanceof InterleavedF64 ) { DiscreteFourierTransformOps . phase ( ( InterleavedF64 ) transform , ( GrayF64 ) phase ) ; } else { throw new IllegalArgumentException ( "Unknown image type" ) ; }
public class Client { /** * Creates a Series * @ param series The Series to create * @ return The created Series * @ since 1.0.0 * @ throws NullPointerException If the input Series is null . */ public Result < Series > createSeries ( Series series ) { } }
checkNotNull ( series ) ; URI uri = null ; try { URIBuilder builder = new URIBuilder ( String . format ( "/%s/series/" , API_VERSION ) ) ; uri = builder . build ( ) ; } catch ( URISyntaxException e ) { String message = "Could not build URI" ; throw new IllegalArgumentException ( message , e ) ; } Result < Series > result = null ; String body = null ; try { body = Json . dumps ( series ) ; } catch ( JsonProcessingException e ) { String message = "Error serializing the body of the request. More detail: " + e . getMessage ( ) ; result = new Result < Series > ( null , GENERIC_ERROR_CODE , message ) ; return result ; } HttpRequest request = buildRequest ( uri . toString ( ) , HttpMethod . POST , body ) ; result = execute ( request , Series . class ) ; return result ;
public class UIUtils { /** * Get the selectableItemBackground attribute drawable * @ return */ public static Drawable getSelectableItemBackgroundBorderless ( Context ctx ) { } }
int [ ] attrs = new int [ ] { R . attr . selectableItemBackgroundBorderless /* index 0 */ } ; TypedArray ta = ctx . obtainStyledAttributes ( attrs ) ; Drawable drawableFromTheme = ta . getDrawable ( 0 /* index */ ) ; ta . recycle ( ) ; return drawableFromTheme ;
public class UmlGraphDoc { /** * Option check , forwards options to the standard doclet , if that one refuses them , * they are sent to UmlGraph */ public static int optionLength ( String option ) { } }
int result = Standard . optionLength ( option ) ; if ( result != 0 ) return result ; else return UmlGraph . optionLength ( option ) ;
public class AbstractFileStorageEngine { /** * Returns the location of the directory from the configuration or the temporary directory if not defined . * @ return */ protected String getDirectory ( ) { } }
// get the default filepath of the permanet storage file String directory = storageConfiguration . getDirectory ( ) ; if ( directory == null || directory . isEmpty ( ) ) { directory = System . getProperty ( "java.io.tmpdir" ) ; // write them to the tmp directory } return directory ;
public class SpringSecurityXmAuthenticationContext { /** * { @ inheritDoc } */ @ Override public Optional < String > getSessionId ( ) { } }
return getDetails ( ) . flatMap ( details -> { String sessionId ; if ( details instanceof OAuth2AuthenticationDetails ) { sessionId = OAuth2AuthenticationDetails . class . cast ( details ) . getSessionId ( ) ; } else if ( details instanceof WebAuthenticationDetails ) { sessionId = WebAuthenticationDetails . class . cast ( details ) . getSessionId ( ) ; } else { throw new IllegalStateException ( "Unsupported auth details type " + details . getClass ( ) ) ; } return Optional . ofNullable ( sessionId ) ; } ) ;
public class CPFriendlyURLEntryLocalServiceBaseImpl { /** * Returns all the cp friendly url entries matching the UUID and company . * @ param uuid the UUID of the cp friendly url entries * @ param companyId the primary key of the company * @ return the matching cp friendly url entries , or an empty list if no matches were found */ @ Override public List < CPFriendlyURLEntry > getCPFriendlyURLEntriesByUuidAndCompanyId ( String uuid , long companyId ) { } }
return cpFriendlyURLEntryPersistence . findByUuid_C ( uuid , companyId ) ;
public class ThrottledApiHandler { /** * Retrieve summoner ids for the specified names * @ param names The names of the users * @ return Their respective ids */ public Future < List < Long > > getSummonerIds ( String ... names ) { } }
return new ApiFuture < > ( ( ) -> handler . getSummonerIds ( names ) ) ;
// NOTE(review): dispatch for direct *.jpf requests. Validates (1) that the request URI's
// module path matches this module's configured prefix and (2) that the requested .jpf
// file matches the one implied by the "begin" action mapping, sending a dev-time error
// for either mismatch; otherwise rewrites the URI to the begin action and forwards.
// Error-reporting order and early returns are load-bearing, so the code is unchanged.
public class PageFlowRequestProcessor { /** * Process any direct request for a page flow by forwarding to its " begin " action . * @ param request the current HttpServletRequest * @ param response the current HttpServletResponse * @ param uri the decoded request URI * @ return < code > true < / code > if the request was for a page flow , in which case it was forwarded . * @ throws IOException * @ throws ServletException */ protected boolean processPageFlowRequest ( HttpServletRequest request , HttpServletResponse response , String uri ) throws IOException , ServletException { } }
// Forward requests for * . jpf to the " begin " action within the appropriate Struts module . if ( FileUtils . osSensitiveEndsWith ( uri , PageFlowConstants . PAGEFLOW_EXTENSION ) ) { // Make sure the current module config matches the request URI . If not , this could be an // EAR where the struts - config . xml wasn ' t included because of a compilation error . String modulePath = PageFlowUtils . getModulePath ( request ) ; if ( ! moduleConfig . getPrefix ( ) . equals ( modulePath ) ) { if ( LOG . isErrorEnabled ( ) ) { InternalStringBuilder msg = new InternalStringBuilder ( "No module configuration registered for " ) ; msg . append ( uri ) . append ( " (module path " ) . append ( modulePath ) . append ( ")." ) ; LOG . error ( msg . toString ( ) ) ; } if ( modulePath . length ( ) == 0 ) modulePath = "/" ; InternalUtils . sendDevTimeError ( "PageFlow_NoModuleConf" , null , HttpServletResponse . SC_INTERNAL_SERVER_ERROR , request , response , getServletContext ( ) , new Object [ ] { uri , modulePath } ) ; return true ; } // Make sure that the requested pageflow matches the pageflow for the directory . ActionMapping beginMapping = getBeginMapping ( ) ; if ( beginMapping != null ) { String desiredType = beginMapping . getParameter ( ) ; desiredType = desiredType . substring ( desiredType . lastIndexOf ( '.' ) + 1 ) + PAGEFLOW_EXTENSION ; String requestedType = InternalUtils . getDecodedServletPath ( request ) ; requestedType = requestedType . substring ( requestedType . lastIndexOf ( '/' ) + 1 ) ; if ( ! requestedType . equals ( desiredType ) ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Wrong .jpf requested for this directory: got " + requestedType + ", expected " + desiredType ) ; } if ( LOG . isErrorEnabled ( ) ) { InternalStringBuilder msg = new InternalStringBuilder ( "Wrong .jpf requested for this directory: got " ) ; msg . append ( requestedType ) . append ( ", expected " ) . append ( desiredType ) . append ( '.' ) ; LOG . error ( msg . 
toString ( ) ) ; } InternalUtils . sendDevTimeError ( "PageFlow_WrongPath" , null , HttpServletResponse . SC_INTERNAL_SERVER_ERROR , request , response , getServletContext ( ) , new Object [ ] { requestedType , desiredType } ) ; return true ; } } uri = PageFlowUtils . getBeginActionURI ( uri ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Got request for " + request . getRequestURI ( ) + ", forwarding to " + uri ) ; } doForward ( uri , request , response ) ; return true ; } return false ;
public class DynamicEnum { /** * < p > Attempts to register a given { @ link EnumValue } instance to its associated class type . If an instance with the * same name has already been registered , that instance will be returned . < / p > * @ param < T > Specific type of EnumValue being registered * @ param enumValue the value to register * @ return the previously defined EnumValue with specified name and type or the one passed into register * @ throws NullPointerException if enumValue is null */ public static < T extends EnumValue > T register ( @ NonNull T enumValue ) { } }
return Cast . as ( GLOBAL_REPOSITORY . computeIfAbsent ( enumValue . canonicalName ( ) , s -> enumValue ) ) ;
public class Gen { /** * Derived visitor method : check whether CharacterRangeTable * should be emitted , if so , put a new entry into CRTable * and call method to generate bytecode . * If not , just call method to generate bytecode . * @ see # genStats ( List , Env ) * @ param trees The list of trees to be visited . * @ param env The environment to use . * @ param crtFlags The CharacterRangeTable flags * indicating type of the entry . */ public void genStats ( List < JCStatement > trees , Env < GenContext > env , int crtFlags ) { } }
if ( ! genCrt ) { genStats ( trees , env ) ; return ; } if ( trees . length ( ) == 1 ) { // mark one statement with the flags genStat ( trees . head , env , crtFlags | CRT_STATEMENT ) ; } else { int startpc = code . curCP ( ) ; genStats ( trees , env ) ; code . crt . put ( trees , crtFlags , startpc , code . curCP ( ) ) ; }
public class StringParser { /** * Get the unified decimal string for parsing by the runtime library . This is * done by replacing ' , ' with ' . ' . * @ param sStr * The string to unified . Never < code > null < / code > . * @ return Never < code > null < / code > . */ @ Nonnull @ Nonempty private static String _getUnifiedDecimal ( @ Nonnull @ Nonempty final String sStr ) { } }
return StringHelper . replaceAll ( sStr , ',' , '.' ) ;
public class AbstractJsonDeserializer {

    /**
     * Convenience method for subclasses: fetches a parameter of the expected type from a map.
     *
     * @param paramName the name of the parameter
     * @param errorMessage the error message to add to the exception if the param does not exist
     * @param clazz the class of the parameter that should be parsed
     * @param mapToUse the map to use as input parameter source
     * @param <A> the type of the parameter
     * @return the parameter with the given name. If it is absent (or of the wrong type) and an
     *         error message is provided, an IOException is thrown with that message; if no error
     *         message is provided, null is returned.
     * @throws IOException if the param is missing and an error message is provided
     */
    protected <A> A getTypedParam(String paramName, String errorMessage, Class<A> clazz, Map<String, Object> mapToUse) throws IOException {
        final Object candidate = mapToUse.get(paramName);
        if (candidate != null && clazz.isAssignableFrom(candidate.getClass())) {
            return (A) candidate;
        }
        if (errorMessage != null) {
            throw new IOException(errorMessage);
        }
        return null;
    }
}
public class Page { /** * Returns a page that assures all data is in memory . * May return the same page if all page data is already in memory . * This allows streaming data sources to skip sections that are not * accessed in a query . */ public Page getLoadedPage ( ) { } }
boolean allLoaded = true ; Block [ ] loadedBlocks = new Block [ blocks . length ] ; for ( int i = 0 ; i < blocks . length ; i ++ ) { loadedBlocks [ i ] = blocks [ i ] . getLoadedBlock ( ) ; if ( loadedBlocks [ i ] != blocks [ i ] ) { allLoaded = false ; } } if ( allLoaded ) { return this ; } return new Page ( loadedBlocks ) ;
public class SpdLongAggregate { /** * Check data type and call super to remove data - synchronized in super */ public boolean remove ( SpdData data ) { } }
if ( data == null ) return false ; if ( data instanceof SpdLong ) { return super . remove ( data ) ; } else { return false ; }
public class User { /** * Changes the user password permanently . * @ param token the reset token . see { @ link # generatePasswordResetToken ( ) } * @ param newpass the new password * @ return true if successful */ public final boolean resetPassword ( String token , String newpass ) { } }
if ( StringUtils . isBlank ( newpass ) || StringUtils . isBlank ( token ) || newpass . length ( ) < Config . MIN_PASS_LENGTH ) { return false ; } Sysprop s = CoreUtils . getInstance ( ) . getDao ( ) . read ( getAppid ( ) , identifier ) ; if ( isValidToken ( s , Config . _RESET_TOKEN , token ) ) { s . removeProperty ( Config . _RESET_TOKEN ) ; String hashed = Utils . bcrypt ( newpass ) ; s . addProperty ( Config . _PASSWORD , hashed ) ; setPassword ( hashed ) ; CoreUtils . getInstance ( ) . getDao ( ) . update ( getAppid ( ) , s ) ; return true ; } return false ;
public class DescribeComputeEnvironmentsRequest { /** * A list of up to 100 compute environment names or full Amazon Resource Name ( ARN ) entries . * @ param computeEnvironments * A list of up to 100 compute environment names or full Amazon Resource Name ( ARN ) entries . */ public void setComputeEnvironments ( java . util . Collection < String > computeEnvironments ) { } }
if ( computeEnvironments == null ) { this . computeEnvironments = null ; return ; } this . computeEnvironments = new java . util . ArrayList < String > ( computeEnvironments ) ;
public class JSONWriter { /** * Append an array value based on a custom JSONString implementation . * @ param jss The JSONString array or container to append . * Its elements can be null or implement JSONString . * @ return this * @ throws JSONException if the value is out of sequence or if a * toJSONString method throws an Exception or * if the writer throws an IOException */ public JSONWriter array ( Iterable < ? extends JSONString > iter ) throws JSONException { } }
array ( ) ; for ( JSONString element : iter ) { value ( element ) ; } endArray ( ) ; return this ;
public class ClientInterceptors { /** * Create a new { @ link Channel } that will call { @ code interceptors } before starting a call on the * given channel . The first interceptor will have its { @ link ClientInterceptor # interceptCall } * called first . * @ param channel the underlying channel to intercept . * @ param interceptors a list of interceptors to bind to { @ code channel } . * @ return a new channel instance with the interceptors applied . */ public static Channel interceptForward ( Channel channel , List < ? extends ClientInterceptor > interceptors ) { } }
List < ? extends ClientInterceptor > copy = new ArrayList < > ( interceptors ) ; Collections . reverse ( copy ) ; return intercept ( channel , copy ) ;
public class OrganizationResource { /** * Handle organization posts when the server got a request POST < dm _ url > / organization & MIME that contains an organization . * @ param organization The organization to add to Grapes database * @ return Response An acknowledgment : < br / > - 400 if the artifact is MIME is malformed < br / > - 500 if internal error < br / > - 201 if ok */ @ POST public Response postOrganization ( @ Auth final DbCredential credential , final Organization organization ) { } }
if ( ! credential . getRoles ( ) . contains ( DbCredential . AvailableRoles . DATA_UPDATER ) ) { throw new WebApplicationException ( Response . status ( Response . Status . UNAUTHORIZED ) . build ( ) ) ; } LOG . info ( "Got a post organization request." ) ; // Checks if the data is corrupted DataValidator . validate ( organization ) ; final DbOrganization dbOrganization = getModelMapper ( ) . getDbOrganization ( organization ) ; getOrganizationHandler ( ) . store ( dbOrganization ) ; return Response . ok ( ) . status ( HttpStatus . CREATED_201 ) . build ( ) ;
// NOTE(review): replaces the word under the cursor with `s`. The delete order matters:
// text after the cursor is removed first, then text before it, so `selStart` stays valid
// for both operations. `autoCompleting` guards against re-entrant filter callbacks while
// the Editable is being mutated. Left byte-identical because the offset arithmetic is
// order-dependent.
public class AutoCompleteEditText { /** * Replaces the current word with s . Used by Adapter to set the selected item as text . * @ param s text to replace with */ public void performCompletion ( String s ) { } }
int selStart = getSelectionStart ( ) ; int selEnd = getSelectionEnd ( ) ; if ( selStart != selEnd ) return ; Editable text = getText ( ) ; HintSpan [ ] spans = text . getSpans ( 0 , length ( ) , HintSpan . class ) ; if ( spans . length > 1 ) throw new IllegalStateException ( "more than one HintSpan" ) ; Word word = getCurrentWord ( ) ; if ( word == null ) throw new IllegalStateException ( "no word to complete" ) ; autoCompleting = true ; // for ( HintSpan span : spans ) // text . delete ( text . getSpanStart ( span ) , text . getSpanEnd ( span ) ) ; text . delete ( selStart , selStart + word . postCursor . length ( ) ) ; text . delete ( selStart - word . preCursor . length ( ) , selStart ) ; text . insert ( selStart - word . preCursor . length ( ) , s ) ; setSelection ( selStart - word . preCursor . length ( ) + s . length ( ) ) ; fireOnFilterEvent ( null ) ; super . setImeOptions ( prevOptions ) ; autoCompleting = false ;
public class TimelineModel { /** * Gets event by its index as String * @ param index index * @ return TimelineEvent found event or null */ public TimelineEvent getEvent ( String index ) { } }
return getEvent ( index != null ? Integer . valueOf ( index ) : - 1 ) ;
public class CmsModuleAddResourceTypeThread { /** * Locks the given resource temporarily . < p > * @ param resource the resource to lock * @ throws CmsException if locking fails */ private void lockTemporary ( CmsResource resource ) throws CmsException { } }
CmsObject cms = getCms ( ) ; CmsUser user = cms . getRequestContext ( ) . getCurrentUser ( ) ; CmsLock lock = cms . getLock ( resource ) ; if ( ! lock . isOwnedBy ( user ) ) { cms . lockResourceTemporary ( resource ) ; } else if ( ! lock . isOwnedInProjectBy ( user , cms . getRequestContext ( ) . getCurrentProject ( ) ) ) { cms . changeLock ( resource ) ; }
public class MClipboardPrinter { /** * Copie la sélection d ' une table dans le presse - papiers ( au format html pour Excel par exemple ) . * @ param table * MBasicTable */ protected void copySelectionToClipboard ( final MBasicTable table ) { } }
if ( table . getSelectionModel ( ) . isSelectionEmpty ( ) ) { return ; } final Toolkit toolkit = table . getToolkit ( ) ; final Clipboard clipboard = toolkit . getSystemClipboard ( ) ; final ByteArrayOutputStream byteArrayOut = new ByteArrayOutputStream ( 2048 ) ; try { writeHtml ( table , byteArrayOut , true ) ; final String charset = System . getProperty ( "file.encoding" ) ; final StringSelection contents = new StringSelection ( byteArrayOut . toString ( charset ) ) ; clipboard . setContents ( contents , contents ) ; } catch ( final IOException e ) { MSwingUtilities . showException ( e ) ; }
public class HybridTreetankStorageModule { /** * Bootstrap a new device as a treetank storage using nodes to abstract the * device . * @ throws IOException * is thrown if a node couldn ' t be created due to errors in the * backend . */ private void createStorage ( ) throws TTException { } }
LOGGER . debug ( "Creating storage with " + mDataNumbers + " nodes containing " + BLOCKS_IN_DATA + " blocks with " + IStorageModule . VIRTUAL_BLOCK_SIZE + " bytes each." ) ; // Creating mirror jCloudsStorageModule = new JCloudsStorageModule ( BLOCKS_IN_DATA * VIRTUAL_BLOCK_SIZE , Files . createTempDir ( ) ) ; IData data = this . mRtx . getCurrentData ( ) ; if ( data != null ) { return ; } for ( int i = 0 ; i < mDataNumbers ; i ++ ) { // Bootstrapping nodes containing clusterSize - many blocks / sectors . LOGGER . debug ( "Bootstraping node " + i + "\tof " + ( mDataNumbers - 1 ) ) ; this . mRtx . bootstrap ( new byte [ HybridTreetankStorageModule . BYTES_IN_DATA ] ) ; } this . mRtx . commit ( ) ;
public class SnappyOutputStream { /** * / * ( non - Javadoc ) * @ see java . io . OutputStream # close ( ) */ @ Override public void close ( ) throws IOException { } }
if ( closed ) { return ; } try { flush ( ) ; out . close ( ) ; } finally { closed = true ; inputBufferAllocator . release ( inputBuffer ) ; outputBufferAllocator . release ( outputBuffer ) ; inputBuffer = null ; outputBuffer = null ; }
public class PatternUtils { /** * Get the suffix matcher . This is similar to { @ link # tail ( Matcher ) } except that it intended * to be used with { @ link # getPrefix ( Matcher ) } */ static Matcher getSuffix ( Matcher matcher ) { } }
if ( matcher instanceof SeqMatcher ) { List < Matcher > ms = matcher . < SeqMatcher > as ( ) . matchers ( ) ; return SeqMatcher . create ( ms . subList ( 1 , ms . size ( ) ) ) ; } else if ( matcher instanceof ZeroOrMoreMatcher ) { ZeroOrMoreMatcher zm = matcher . as ( ) ; return zm . next ( ) ; } else if ( matcher instanceof CharSeqMatcher ) { String pattern = matcher . < CharSeqMatcher > as ( ) . pattern ( ) ; return pattern . length ( ) <= 1 ? TrueMatcher . INSTANCE : new CharSeqMatcher ( pattern . substring ( 1 ) ) ; } else { return TrueMatcher . INSTANCE ; }
public class SimpleBlas {

    /**
     * Compute x^T * y (dot product).
     */
    public static float dot(FloatMatrix x, FloatMatrix y) {
        // Pure-Java implementation is used here instead of NativeBlas.sdot.
        return JavaBlas.rdot(x.length, x.data, 0, 1, y.data, 0, 1);
    }
}
public class Validator { /** * Validate a set of properties for a description , and return a report . * @ param props the input properties * @ param desc the configuration description * @ return the validation report */ public static Report validate ( final Properties props , final Description desc ) { } }
final List < Property > properties = desc . getProperties ( ) ; return validate ( props , properties ) ;
public class RequestContextExportingAppender { /** * Adds the specified { @ link AttributeKey } to the export list . * @ param alias the alias of the attribute to export * @ param attrKey the key of the attribute to export * @ param stringifier the { @ link Function } that converts the attribute value into a { @ link String } */ public void addAttribute ( String alias , AttributeKey < ? > attrKey , Function < ? , String > stringifier ) { } }
/* Fails fast if the appender has already been started (configuration is
 * immutable after start), then records the export entry on the builder.
 * NOTE(review): no null checks here — presumably the builder validates its
 * arguments; confirm before relying on it. */
ensureNotStarted ( ) ; builder . addAttribute ( alias , attrKey , stringifier ) ;
public class UIContextImpl { /** * Stores the extrinsic state information for the given component . * @ param component the component to set the model for . * @ param model to set . */ @ Override public void setModel ( final WebComponent component , final WebModel model ) { } }
/* Associates (or replaces) the model for this component in the context's
 * backing map; the component itself is the key. NOTE(review): neither
 * argument is null-checked — verify whether callers guarantee non-null. */
map . put ( component , model ) ;
public class Sequencer { /** * See { @ link Sequencer # registerNodeTypes ( String , org . modeshape . jcr . api . nodetype . NodeTypeManager , boolean ) } * @ param cndStream the input stream containing the CND file ; may not be null * @ param nodeTypeManager the node type manager with which the node types in the CND file should be registered ; may not be * null * @ param allowUpdate a boolean which indicates whether updates on existing node types are allowed or no . See * { @ link NodeTypeManager # registerNodeType ( javax . jcr . nodetype . NodeTypeDefinition , boolean ) } * @ throws RepositoryException if anything fails * @ throws IOException if any stream related operations fail */ protected void registerNodeTypes ( InputStream cndStream , NodeTypeManager nodeTypeManager , boolean allowUpdate ) throws RepositoryException , IOException { } }
/* Guard against a null CND stream, then delegate registration to the JCR
 * node type manager. NOTE(review): the stream is not closed here — presumably
 * ownership stays with the caller; confirm. */
if ( cndStream == null ) { throw new IllegalArgumentException ( "The stream to the given cnd file is null" ) ; } nodeTypeManager . registerNodeTypes ( cndStream , allowUpdate ) ;
public class JQMCollapsible { /** * Sets the header button as inline block . */ @ Override public void setInline ( boolean value ) { } }
inline = value ; if ( headingToggle != null ) { JQMCommon . setInlineEx ( headingToggle , value , JQMCommon . STYLE_UI_BTN_INLINE ) ; }
public class WriterBasedGenerator { /** * / * Same as " _ writeString2 ( ) " , except needs additional escaping * for subset of characters */ private void _writeStringASCII ( final int len , final int maxNonEscaped ) throws IOException , JsonGenerationException { } }
/* Writes `len` chars starting at _outputTail, escaping any char whose escape
 * code table entry is non-zero, and forcing standard escaping for any char
 * above `maxNonEscaped` (the ASCII ceiling). The inner loop advances over
 * runs of safe chars; on hitting a char needing escape it flushes the clean
 * run [_outputHead, _outputTail) to the writer, skips past the offending
 * char (++_outputTail), and emits its escape via
 * _prependOrWriteCharacterEscape. NOTE(review): _outputHead is presumably
 * updated inside _prependOrWriteCharacterEscape — confirm, as this loop
 * depends on it to avoid re-flushing the same region. */
// And then we ' ll need to verify need for escaping etc : int end = _outputTail + len ; final int [ ] escCodes = _outputEscapes ; final int escLimit = Math . min ( escCodes . length , maxNonEscaped + 1 ) ; int escCode = 0 ; output_loop : while ( _outputTail < end ) { char c ; // Fast loop for chars not needing escaping escape_loop : while ( true ) { c = _outputBuffer [ _outputTail ] ; if ( c < escLimit ) { escCode = escCodes [ c ] ; if ( escCode != 0 ) { break escape_loop ; } } else if ( c > maxNonEscaped ) { escCode = CharacterEscapes . ESCAPE_STANDARD ; break escape_loop ; } if ( ++ _outputTail >= end ) { break output_loop ; } } int flushLen = ( _outputTail - _outputHead ) ; if ( flushLen > 0 ) { _writer . write ( _outputBuffer , _outputHead , flushLen ) ; } ++ _outputTail ; _prependOrWriteCharacterEscape ( c , escCode ) ; }
public class AngularPass { /** * Given a FUNCTION node returns array of STRING nodes representing function * parameters . * @ param n the FUNCTION node . * @ return STRING nodes . */ private List < Node > createDependenciesList ( Node n ) { } }
checkArgument ( n . isFunction ( ) ) ; Node params = NodeUtil . getFunctionParameters ( n ) ; if ( params != null ) { return createStringsFromParamList ( params ) ; } return new ArrayList < > ( ) ;
public class Vector3i { /** * / * ( non - Javadoc ) * @ see org . joml . Vector3ic # sub ( int , int , int , org . joml . Vector3i ) */ public Vector3i sub ( int x , int y , int z , Vector3i dest ) { } }
dest . x = this . x - x ; dest . y = this . y - y ; dest . z = this . z - z ; return dest ;
public class SimpleRetry { /** * Should a retry attempt occur . * @ return True if it should */ @ Override public boolean canRetry ( Throwable exception ) { } }
/* Decision order: (1) null exception -> no retry; (2) an include list, when
 * present, must contain the exact exception class (no subtype matching —
 * getClass() is compared, not instanceof); (3) an exclude list, when present,
 * vetoes listed classes; (4) otherwise retry while the attempt counter stays
 * within maxAttempts AND (when a maxDelay is configured) the accumulated
 * delay is still below it. NOTE(review): incrementAndGet() means this method
 * mutates shared state — calling it has side effects, so callers must invoke
 * it exactly once per attempt. */
if ( exception == null ) { return false ; } Class < ? extends Throwable > exceptionClass = exception . getClass ( ) ; if ( hasIncludes && ! includes . contains ( exceptionClass ) ) { return false ; } else if ( hasExcludes && excludes . contains ( exceptionClass ) ) { return false ; } else { return this . attemptNumber . incrementAndGet ( ) < ( maxAttempts + 1 ) && ( ( maxDelay == null ) || overallDelay . get ( ) < maxDelay . toMillis ( ) ) ; }
public class FileCopyProgressPanel { /** * Set the number of the current file . If called outside the EDT this method * will switch to the UI thread using * < code > SwingUtilities . invokeLater ( Runnable ) < / code > . * @ param n * The N value in " N of M " . */ public final void setCurrentFile ( final int n ) { } }
if ( SwingUtilities . isEventDispatchThread ( ) ) { setCurrentFileIntern ( n ) ; } else { try { SwingUtilities . invokeLater ( new Runnable ( ) { public void run ( ) { setCurrentFileIntern ( n ) ; } } ) ; } catch ( final Exception ex ) { ignore ( ) ; } }
public class ClassificationServiceCache { /** * Keep a cache of items files associated with classification in order to improve performance . */ @ SuppressWarnings ( "unchecked" ) private static synchronized Map < String , Boolean > getCache ( GraphRewrite event ) { } }
/* Lazily creates a per-event LRU cache (capacity 30000), stored in the
 * rewrite context keyed by this class, so the cache's lifetime matches the
 * event. The map is wrapped in Collections.synchronizedMap and the accessor
 * itself is synchronized, so concurrent first-access cannot create two maps.
 * NOTE(review): LRUMap is used raw here — the unchecked cast is what the
 * @SuppressWarnings covers. */
Map < String , Boolean > result = ( Map < String , Boolean > ) event . getRewriteContext ( ) . get ( ClassificationServiceCache . class ) ; if ( result == null ) { result = Collections . synchronizedMap ( new LRUMap ( 30000 ) ) ; event . getRewriteContext ( ) . put ( ClassificationServiceCache . class , result ) ; } return result ;
public class DateFormat { /** * Formats a time object into a time string . Examples of time objects * are a time value expressed in milliseconds and a Date object . * @ param obj must be a Number or a Date or a Calendar . * @ param toAppendTo the string buffer for the returning time string . * @ return the formatted time string . * @ param fieldPosition keeps track of the position of the field * within the returned string . * On input : an alignment field , * if desired . On output : the offsets of the alignment field . For * example , given a time text " 1996.07.10 AD at 15:08:56 PDT " , * if the given fieldPosition is DateFormat . YEAR _ FIELD , the * begin index and end index of fieldPosition will be set to * 0 and 4 , respectively . * Notice that if the same time field appears * more than once in a pattern , the fieldPosition will be set for the first * occurrence of that time field . For instance , formatting a Date to * the time string " 1 PM PDT ( Pacific Daylight Time ) " using the pattern * " h a z ( zzzz ) " and the alignment field DateFormat . TIMEZONE _ FIELD , * the begin index and end index of fieldPosition will be set to * 5 and 8 , respectively , for the first occurrence of the timezone * pattern character ' z ' . * @ see java . text . Format */ @ Override public final StringBuffer format ( Object obj , StringBuffer toAppendTo , FieldPosition fieldPosition ) { } }
if ( obj instanceof Calendar ) return format ( ( Calendar ) obj , toAppendTo , fieldPosition ) ; else if ( obj instanceof Date ) return format ( ( Date ) obj , toAppendTo , fieldPosition ) ; else if ( obj instanceof Number ) return format ( new Date ( ( ( Number ) obj ) . longValue ( ) ) , toAppendTo , fieldPosition ) ; else throw new IllegalArgumentException ( "Cannot format given Object (" + obj . getClass ( ) . getName ( ) + ") as a Date" ) ;
public class DBUpdate { /** * Perform a bit operation on the given field * @ param field The field to perform the operation on * @ param operation The operation to perform * @ param value The value * @ return this object */ public static Builder bit ( String field , String operation , int value ) { } }
return new Builder ( ) . bit ( field , operation , value ) ;
public class CLI { /** * Run the appropriate command based on the command line parameters . * @ param args * the arguments passed through the CLI * @ throws IOException * exception if problems with the incoming data * @ throws JDOMException * a xml exception */ public final void run ( final String [ ] args ) throws IOException , JDOMException { } }
try { Parameters parameters = cliArgumentsParser . parse ( args ) ; if ( parameters . getStrategy ( ) == Strategy . TOKENIZE ) { annotate ( parameters ) ; } else if ( parameters . getStrategy ( ) == Strategy . SERVER ) { server ( parameters ) ; } else if ( parameters . getStrategy ( ) == Strategy . CLIENT ) { client ( parameters ) ; } else { System . out . println ( String . format ( "Invalid sub-command [%s]. Sub-commands accepted are: (tok|server|client)" , parameters . getStrategyString ( ) ) ) ; } } catch ( final ArgumentParserException e ) { cliArgumentsParser . handleError ( e ) ; System . out . println ( "Run java -jar target/ixa-pipe-tok-" + VERSION + ".jar (tok|server|client) -help for details" ) ; System . exit ( 1 ) ; }
public class MathUtils {
    /**
     * Returns the values found at the given column across n arrays.
     *
     * <p>Fix: the javadoc always promised {@code IllegalArgumentException}
     * for a negative index, but the original body performed no check and
     * would have thrown {@code ArrayIndexOutOfBoundsException} instead. The
     * documented contract is now enforced.
     *
     * @param column the column to get values for; must be {@literal >= 0}
     *        and a valid index into every supplied array
     * @param nums the arrays to extract values from
     * @return a double array containing the value at {@code column} for each
     *         of the supplied arrays, in order
     * @throws IllegalArgumentException if the index is {@literal < 0}
     */
    private static double[] column(int column, double[]... nums) throws IllegalArgumentException {
        if (column < 0) {
            throw new IllegalArgumentException("Column index must be >= 0, got: " + column);
        }
        double[] ret = new double[nums.length];
        for (int i = 0; i < nums.length; i++) {
            ret[i] = nums[i][column];
        }
        return ret;
    }
}
public class AWSLicenseManagerClient { /** * Lists license configuration objects for an account , each containing the name , description , license type , and * other license terms modeled from a license agreement . * @ param listLicenseConfigurationsRequest * @ return Result of the ListLicenseConfigurations operation returned by the service . * @ throws InvalidParameterValueException * One or more parameter values are not valid . * @ throws ServerInternalException * The server experienced an internal error . Try again . * @ throws FilterLimitExceededException * The request uses too many filters or too many filter values . * @ throws AuthorizationException * The AWS user account does not have permission to perform the action . Check the IAM policy associated with * this account . * @ throws AccessDeniedException * Access to resource denied . * @ throws RateLimitExceededException * Too many requests have been submitted . Try again after a brief wait . * @ sample AWSLicenseManager . ListLicenseConfigurations * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / license - manager - 2018-08-01 / ListLicenseConfigurations " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ListLicenseConfigurationsResult listLicenseConfigurations ( ListLicenseConfigurationsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListLicenseConfigurations ( request ) ;
public class HttpJsonSerializer { /** * Parses a suggestion query * @ return a hash map of key / value pairs * @ throws JSONException if parsing failed * @ throws BadRequestException if the content was missing or parsing failed */ @ Override public HashMap < String , String > parseSuggestV1 ( ) { } }
final String json = query . getContent ( ) ; if ( json == null || json . isEmpty ( ) ) { throw new BadRequestException ( HttpResponseStatus . BAD_REQUEST , "Missing message content" , "Supply valid JSON formatted data in the body of your request" ) ; } try { return JSON . parseToObject ( query . getContent ( ) , new TypeReference < HashMap < String , String > > ( ) { } ) ; } catch ( IllegalArgumentException iae ) { throw new BadRequestException ( "Unable to parse the given JSON" , iae ) ; }
public class MiniBatchGlobalGroupAggFunction { /** * The { @ code previousAcc } is accumulator , but input is a row in & lt ; key , accumulator & gt ; schema , * the specific generated { @ link # localAgg } will project the { @ code input } to accumulator * in merge method . */ @ Override public BaseRow addInput ( @ Nullable BaseRow previousAcc , BaseRow input ) throws Exception { } }
BaseRow currentAcc ; if ( previousAcc == null ) { currentAcc = localAgg . createAccumulators ( ) ; } else { currentAcc = previousAcc ; } localAgg . setAccumulators ( currentAcc ) ; localAgg . merge ( input ) ; return localAgg . getAccumulators ( ) ;
public class WebServiceCommunication { /** * Gets the response headers of specified uri . * @ param uri http / https uri * @ return response headers * @ throws IOException in case of any IO related issue */ public static Header [ ] headUri ( String uri ) throws IOException { } }
CloseableHttpClient c = newHttpClient ( new URL ( uri ) ) ; HttpHead head = new HttpHead ( uri ) ; LOG . debug ( "HEAD {}" , uri ) ; CloseableHttpResponse res = c . execute ( head , HttpClientContext . create ( ) ) ; checkResponse ( res ) ; return res . getAllHeaders ( ) ;
public class HttpsHealthCheckClient { /** * Updates a HttpsHealthCheck resource in the specified project using the data included in the * request . * < p > Sample code : * < pre > < code > * try ( HttpsHealthCheckClient httpsHealthCheckClient = HttpsHealthCheckClient . create ( ) ) { * ProjectGlobalHttpsHealthCheckName httpsHealthCheck = ProjectGlobalHttpsHealthCheckName . of ( " [ PROJECT ] " , " [ HTTPS _ HEALTH _ CHECK ] " ) ; * HttpsHealthCheck2 httpsHealthCheckResource = HttpsHealthCheck2 . newBuilder ( ) . build ( ) ; * List & lt ; String & gt ; fieldMask = new ArrayList & lt ; & gt ; ( ) ; * Operation response = httpsHealthCheckClient . updateHttpsHealthCheck ( httpsHealthCheck , httpsHealthCheckResource , fieldMask ) ; * < / code > < / pre > * @ param httpsHealthCheck Name of the HttpsHealthCheck resource to update . * @ param httpsHealthCheckResource An HttpsHealthCheck resource . This resource defines a template * for how individual instances should be checked for health , via HTTPS . * @ param fieldMask The fields that should be serialized ( even if they have empty values ) . If the * containing message object has a non - null fieldmask , then all the fields in the field mask * ( and only those fields in the field mask ) will be serialized . If the containing object does * not have a fieldmask , then only non - empty fields will be serialized . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation updateHttpsHealthCheck ( ProjectGlobalHttpsHealthCheckName httpsHealthCheck , HttpsHealthCheck2 httpsHealthCheckResource , List < String > fieldMask ) { } }
UpdateHttpsHealthCheckHttpRequest request = UpdateHttpsHealthCheckHttpRequest . newBuilder ( ) . setHttpsHealthCheck ( httpsHealthCheck == null ? null : httpsHealthCheck . toString ( ) ) . setHttpsHealthCheckResource ( httpsHealthCheckResource ) . addAllFieldMask ( fieldMask ) . build ( ) ; return updateHttpsHealthCheck ( request ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIfcServiceLifeFactorTypeEnum ( ) { } }
/* EMF-generated lazy accessor: on first use, looks the enum up by its fixed
 * classifier index (896) in the registered package and caches it. Do not
 * hand-edit — regenerate from the model if the index changes. */
if ( ifcServiceLifeFactorTypeEnumEEnum == null ) { ifcServiceLifeFactorTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 896 ) ; } return ifcServiceLifeFactorTypeEnumEEnum ;
public class cuDoubleComplex { /** * Creates a new complex number consisting of the given real and * imaginary part . * @ param r The real part of the complex number * @ param i The imaginary part of the complex number * @ return A complex number with the given real and imaginary part */ public static cuDoubleComplex cuCmplx ( double r , double i ) { } }
cuDoubleComplex res = new cuDoubleComplex ( ) ; res . x = r ; res . y = i ; return res ;
public class AccountingDate { /** * Creates an { @ code AccountingDate } validating the input . * @ param chronology the Accounting chronology to base the date on , not null * @ param prolepticYear the Accounting proleptic - year * @ return the date in Accounting calendar system , not null * @ throws DateTimeException if the value of any field is out of range , * or if the day - of - year is invalid for the month - year , * NullPointerException if an AccountingChronology was not provided */ static AccountingDate create ( AccountingChronology chronology , int prolepticYear , int month , int dayOfMonth ) { } }
/* Validates year and month against the chronology's ranges, then checks the
 * day-of-month against the actual month length. When the day is out of range,
 * a more specific error is produced for the leap-week case: a day that would
 * only exist in the leap-week month of a leap year (i.e. month equals the
 * chronology's leap-week month, the day fits within weeksInMonth+1 weeks, and
 * the year is NOT a leap year) reports "not a leap year"; every other
 * violation reports a plain invalid-date message. */
Objects . requireNonNull ( chronology , "A previously setup chronology is required." ) ; YEAR . checkValidValue ( prolepticYear ) ; chronology . range ( MONTH_OF_YEAR ) . checkValidValue ( month , MONTH_OF_YEAR ) ; if ( dayOfMonth < 1 || dayOfMonth > lengthOfMonth ( chronology , prolepticYear , month ) ) { if ( month == chronology . getLeapWeekInMonth ( ) && dayOfMonth < ( chronology . getDivision ( ) . getWeeksInMonth ( month ) + 1 ) * DAYS_IN_WEEK && ! chronology . isLeapYear ( prolepticYear ) ) { throw new DateTimeException ( "Invalid date '" + month + "/" + dayOfMonth + "' as '" + prolepticYear + "' is not a leap year" ) ; } else { throw new DateTimeException ( "Invalid date '" + month + "/" + dayOfMonth + "'" ) ; } } return new AccountingDate ( chronology , prolepticYear , month , dayOfMonth ) ;
public class SimplePicker { /** * Retrieves the list of recently selected examples from a file on the file system . * @ return the list of recently used examples . */ private List loadRecentList ( ) { } }
try { InputStream in = new BufferedInputStream ( new FileInputStream ( RECENT_FILE_NAME ) ) ; XMLDecoder d = new XMLDecoder ( in ) ; Object result = d . readObject ( ) ; d . close ( ) ; return ( List ) result ; } catch ( FileNotFoundException ex ) { // This is ok , it ' s probably the first time the picker has been used . return new ArrayList ( ) ; }
public class CounterValue { /** * Creates a new { @ link CounterValue } with the value and state based on the boundaries . * @ param value the counter ' s value . * @ param lowerBound the counter ' s lower bound . * @ param upperBound the counter ' s upper bound . * @ return the { @ link CounterValue } . */ public static CounterValue newCounterValue ( long value , long lowerBound , long upperBound ) { } }
/* Static factory: derives the state from the value's position relative to
 * the bounds (see calculateState) and pairs it with the raw value. */
return new CounterValue ( value , calculateState ( value , lowerBound , upperBound ) ) ;
public class ForwardCloseRequest { /** * Encode the connection path . * { @ link PaddedEPath # encode ( EPath , ByteBuf ) } can ' t be used here because the { @ link ForwardCloseRequest } has an * extra reserved byte after the connection path size for some reason . * @ param path the { @ link PaddedEPath } to encode . * @ param buffer the { @ link ByteBuf } to encode into . */ private static void encodeConnectionPath ( PaddedEPath path , ByteBuf buffer ) { } }
/* Layout: [size byte][reserved byte][segments...]. The size byte cannot be
 * known up front, so a 0 placeholder is written, the segments are encoded,
 * and then the writer index is temporarily rewound (mark/reset) to patch the
 * placeholder with the segment length measured in 16-bit WORDS (bytes / 2 —
 * CIP path sizes are word counts). Each segment dispatches on its concrete
 * type; an unknown segment type is a programming error and throws. */
// length placeholder . . . int lengthStartIndex = buffer . writerIndex ( ) ; buffer . writeByte ( 0 ) ; // reserved buffer . writeZero ( 1 ) ; // encode the path segments . . . int dataStartIndex = buffer . writerIndex ( ) ; for ( EPathSegment segment : path . getSegments ( ) ) { if ( segment instanceof LogicalSegment ) { LogicalSegment . encode ( ( LogicalSegment ) segment , path . isPadded ( ) , buffer ) ; } else if ( segment instanceof PortSegment ) { PortSegment . encode ( ( PortSegment ) segment , path . isPadded ( ) , buffer ) ; } else if ( segment instanceof DataSegment ) { DataSegment . encode ( ( DataSegment ) segment , path . isPadded ( ) , buffer ) ; } else { throw new RuntimeException ( "no encoder for " + segment . getClass ( ) . getSimpleName ( ) ) ; } } // go back and update the length int bytesWritten = buffer . writerIndex ( ) - dataStartIndex ; int wordsWritten = bytesWritten / 2 ; buffer . markWriterIndex ( ) ; buffer . writerIndex ( lengthStartIndex ) ; buffer . writeByte ( wordsWritten ) ; buffer . resetWriterIndex ( ) ;