signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ToolbarLarge { /** * Obtains the color of the toolbar ' s title from a specific typed array . * @ param theme * The resource id of the theme , which should be applied on the toolbar , as an { @ link * Integer } value */ private void obtainTitleColor ( final int theme ) { } }
TypedArray typedArray = getContext ( ) . getTheme ( ) . obtainStyledAttributes ( theme , new int [ ] { android . R . attr . textColorPrimary } ) ; int textColorPrimary = typedArray . getResourceId ( 0 , 0 ) ; typedArray . recycle ( ) ; if ( textColorPrimary != 0 ) { int titleColor = ContextCompat . getColor ( getContext ( ) , textColorPrimary ) ; toolbar . setTitleTextColor ( titleColor ) ; }
public class ThreadUtil { /** * 结束线程 , 调用此方法后 , 线程将抛出 { @ link InterruptedException } 异常 * @ param thread 线程 * @ param isJoin 是否等待结束 */ public static void interupt ( Thread thread , boolean isJoin ) { } }
if ( null != thread && false == thread . isInterrupted ( ) ) { thread . interrupt ( ) ; if ( isJoin ) { waitForDie ( thread ) ; } }
public class PreparedStatement { /** * Sets parameter as DECIMAL . * @ throws SQLException if fails to set the big decimal value */ void setDecimal ( final int parameterIndex , final BigDecimal x ) throws SQLException { } }
final ParameterDef def = ( x == null ) ? Decimal : Decimal ( x ) ; setParam ( parameterIndex , def , x ) ;
public class DaryArrayAddressableHeap { /** * Ensure that the array representation has the necessary capacity . * @ param capacity * the requested capacity */ @ Override @ SuppressWarnings ( "unchecked" ) protected void ensureCapacity ( int capacity ) { } }
checkCapacity ( capacity ) ; ArrayHandle [ ] newArray = ( ArrayHandle [ ] ) Array . newInstance ( ArrayHandle . class , capacity + 1 ) ; System . arraycopy ( array , 1 , newArray , 1 , size ) ; array = newArray ;
public class SessionImpl { /** * Copy schema concepts labels to current KeyspaceCache * @ param tx */ private void copySchemaConceptLabelsToKeyspaceCache ( TransactionOLTP tx ) { } }
copyToCache ( tx . getMetaConcept ( ) ) ; copyToCache ( tx . getMetaRole ( ) ) ; copyToCache ( tx . getMetaRule ( ) ) ;
public class JaxRsModuleMetaDataListener { /** * declarative service */ protected void unsetMetaDataSlotService ( MetaDataSlotService slotService ) { } }
JaxRsMetaDataManager . jaxrsApplicationSlot = null ; JaxRsMetaDataManager . jaxrsModuleSlot = null ; JaxRsMetaDataManager . jaxrsComponentSlot = null ;
public class Packer { /** * Get Byte array ( encoded as : VInt - Length + bytes ) * @ return * @ see # putBytes ( byte [ ] ) */ public byte [ ] getBytes ( ) { } }
final int len = getVInt ( ) ; final byte [ ] bytes = new byte [ len ] ; System . arraycopy ( buf , bufPosition , bytes , 0 , bytes . length ) ; bufPosition += bytes . length ; return bytes ;
public class FragmentActivator { /** * Saves the loaded layout in the database for the user and profile . * @ param view * @ param owner * @ throws Exception */ private void saveLayout ( UserView view , IPerson owner ) throws Exception { } }
IUserProfile profile = new UserProfile ( ) ; profile . setProfileId ( view . getProfileId ( ) ) ; userLayoutStore . setUserLayout ( owner , profile , view . getLayout ( ) , true , false ) ;
public class AValue { /** * public Date getDateTimeItem ( ) { * return XmlAdapterUtils . unmarshall ( XMLGregorianCalendarAsDateTime . class , this . getDateTime ( ) ) ; * public void setDateTimeItem ( Date target ) { * setDateTime ( XmlAdapterUtils . marshall ( XMLGregorianCalendarAsDateTime . class , target ) ) ; */ @ Basic @ Column ( name = "GYEARITEM" ) @ Temporal ( TemporalType . DATE ) public Date getGYearItem ( ) { } }
// return XmlAdapterUtils . unmarshall ( XMLGregorianCalendarAsGYear . class , this . getGYear ( ) ) ; return ProvUtilities . toDate ( this . getGYear ( ) ) ;
public class SqlAgentImpl { /** * { @ inheritDoc } * @ see jp . co . future . uroborosql . AbstractAgent # batchInsert ( Class , Stream , jp . co . future . uroborosql . SqlAgent . InsertsCondition ) */ @ SuppressWarnings ( "unchecked" ) @ Override public < E > int batchInsert ( final Class < E > entityType , final Stream < E > entities , final InsertsCondition < ? super E > condition ) { } }
@ SuppressWarnings ( "rawtypes" ) EntityHandler handler = this . getEntityHandler ( ) ; if ( ! handler . getEntityType ( ) . isAssignableFrom ( entityType ) ) { throw new IllegalArgumentException ( "Entity type not supported" ) ; } try { TableMetadata metadata = handler . getMetadata ( this . transactionManager , entityType ) ; SqlContext context = handler . createBatchInsertContext ( this , metadata , entityType ) ; context . setSqlKind ( SqlKind . BATCH_INSERT ) ; // IDアノテーションが付与されたカラム情報を取得する MappingColumn [ ] idColumns = MappingUtils . getIdMappingColumns ( entityType ) ; setGeneratedKeyColumns ( context , idColumns , metadata ) ; int count = 0 ; List < E > entityList = new ArrayList < > ( ) ; for ( Iterator < E > iterator = entities . iterator ( ) ; iterator . hasNext ( ) ; ) { E entity = iterator . next ( ) ; if ( ! entityType . isInstance ( entity ) ) { throw new IllegalArgumentException ( "Entity types do not match" ) ; } entityList . add ( entity ) ; handler . setInsertParams ( context , entity ) ; context . addBatch ( ) ; if ( condition . test ( context , context . batchCount ( ) , entity ) ) { count += Arrays . stream ( doBatchInsert ( context , handler , entityList , idColumns ) ) . sum ( ) ; entityList . clear ( ) ; } } return count + ( context . batchCount ( ) != 0 ? Arrays . stream ( doBatchInsert ( context , handler , entityList , idColumns ) ) . sum ( ) : 0 ) ; } catch ( SQLException e ) { throw new EntitySqlRuntimeException ( SqlKind . BATCH_INSERT , e ) ; }
public class AbstractMBeanServerExecutor { /** * { @ inheritDoc } */ public void each ( ObjectName pObjectName , MBeanEachCallback pCallback ) throws IOException , ReflectionException , MBeanException { } }
try { Set < ObjectName > visited = new HashSet < ObjectName > ( ) ; for ( MBeanServerConnection server : getMBeanServers ( ) ) { // Query for a full name is the same as a direct lookup for ( ObjectName nameObject : server . queryNames ( pObjectName , null ) ) { // Don ' t add if already visited previously if ( ! visited . contains ( nameObject ) ) { pCallback . callback ( server , nameObject ) ; visited . add ( nameObject ) ; } } } } catch ( InstanceNotFoundException exp ) { // Well , should not happen , since we do a query before and the returned value are supposed to exist // on the mbean - server . But , who knows . . . throw new IllegalArgumentException ( "Cannot find MBean " + ( pObjectName != null ? "(MBean " + pObjectName + ")" : "" ) + ": " + exp , exp ) ; }
public class Record { /** * Set up all the fields for this record . */ public void setupFields ( ) { } }
FieldInfo field = null ; for ( int iFieldSeq = DBConstants . MAIN_FIELD ; iFieldSeq < 256 ; iFieldSeq ++ ) { field = this . setupField ( iFieldSeq ) ; // Allocate this Field ( may be overidden ) if ( field == null ) break ; // End of fields }
public class DecimalFormat { /** * First , read the default serializable fields from the stream . Then if * < code > serialVersionOnStream < / code > is less than 1 , indicating that the stream was * written by JDK 1.1 , initialize < code > useExponentialNotation < / code > to false , since * it was not present in JDK 1.1 . Finally , set serialVersionOnStream back to the * maximum allowed value so that default serialization will work properly if this * object is streamed out again . */ private void readObject ( ObjectInputStream stream ) throws IOException , ClassNotFoundException { } }
stream . defaultReadObject ( ) ; // Bug 4185761 validate fields [ Richard / GCL ] // We only need to check the maximum counts because NumberFormat . readObject has // already ensured that the maximum is greater than the minimum count . // Commented for compatibility with previous version , and reserved for further use // if ( getMaximumIntegerDigits ( ) > DOUBLE _ INTEGER _ DIGITS | | // getMaximumFractionDigits ( ) > DOUBLE _ FRACTION _ DIGITS ) { throw new // InvalidObjectException ( " Digit count out of range " ) ; } // Android changed : Allow 2 billion integer digits . // Truncate the maximumIntegerDigits to MAX _ INTEGER _ DIGITS and // maximumFractionDigits to DOUBLE _ FRACTION _ DIGITS if ( getMaximumIntegerDigits ( ) > MAX_INTEGER_DIGITS ) { setMaximumIntegerDigits ( MAX_INTEGER_DIGITS ) ; } if ( getMaximumFractionDigits ( ) > DOUBLE_FRACTION_DIGITS ) { _setMaximumFractionDigits ( DOUBLE_FRACTION_DIGITS ) ; } if ( serialVersionOnStream < 2 ) { exponentSignAlwaysShown = false ; setInternalRoundingIncrement ( null ) ; roundingMode = BigDecimal . ROUND_HALF_EVEN ; formatWidth = 0 ; pad = ' ' ; padPosition = PAD_BEFORE_PREFIX ; if ( serialVersionOnStream < 1 ) { // Didn ' t have exponential fields useExponentialNotation = false ; } } if ( serialVersionOnStream < 3 ) { // Versions prior to 3 do not store a currency object . Create one to match // the DecimalFormatSymbols object . setCurrencyForSymbols ( ) ; } if ( serialVersionOnStream < 4 ) { currencyUsage = CurrencyUsage . STANDARD ; } serialVersionOnStream = currentSerialVersion ; digitList = new DigitList ( ) ; if ( roundingIncrement != null ) { setInternalRoundingIncrement ( new BigDecimal ( roundingIncrement ) ) ; } resetActualRounding ( ) ;
public class VirtualNetworkGatewaysInner { /** * Gets a xml format representation for supported vpn devices . * @ param resourceGroupName The name of the resource group . * @ param virtualNetworkGatewayName The name of the virtual network gateway . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the String object */ public Observable < ServiceResponse < String > > supportedVpnDevicesWithServiceResponseAsync ( String resourceGroupName , String virtualNetworkGatewayName ) { } }
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( virtualNetworkGatewayName == null ) { throw new IllegalArgumentException ( "Parameter virtualNetworkGatewayName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } final String apiVersion = "2017-10-01" ; return service . supportedVpnDevices ( resourceGroupName , virtualNetworkGatewayName , this . client . subscriptionId ( ) , apiVersion , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < String > > > ( ) { @ Override public Observable < ServiceResponse < String > > call ( Response < ResponseBody > response ) { try { ServiceResponse < String > clientResponse = supportedVpnDevicesDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class KinesisIndexTaskIOConfig { /** * This method is for compatibilty so that newer version of KinesisIndexTaskIOConfig can be read by * old version of Druid . Note that this method returns end sequence numbers instead of start . This is because * { @ link SeekableStreamStartSequenceNumbers } didn ' t exist before . * A SeekableStreamEndSequenceNumbers ( has no exclusivity info ) is returned here because the Kinesis extension * previously stored exclusivity info separately in exclusiveStartSequenceNumberPartitions . */ @ JsonProperty @ Deprecated public SeekableStreamEndSequenceNumbers < String , String > getStartPartitions ( ) { } }
final SeekableStreamStartSequenceNumbers < String , String > startSequenceNumbers = getStartSequenceNumbers ( ) ; return new SeekableStreamEndSequenceNumbers < > ( startSequenceNumbers . getStream ( ) , startSequenceNumbers . getPartitionSequenceNumberMap ( ) ) ;
public class TableIterator { /** * region AsyncIterator Implementation */ @ Override public CompletableFuture < T > getNext ( ) { } }
// Verify no other call to getNext ( ) is currently executing . return getNextBucket ( ) . thenCompose ( bucket -> { if ( bucket == null ) { // We are done . return CompletableFuture . completedFuture ( null ) ; } else { // Convert the TableBucket into the desired result . return this . resultConverter . apply ( bucket ) ; } } ) ;
public class AbstractSpringBeanDefinitionParser { /** * { @ inheritDoc } */ @ Override public void doParse ( Element element , BeanDefinitionBuilder builder ) { } }
builder . addPropertyReference ( "bundleContext" , "bundleContext" ) ; prepareInjection ( element , builder ) ; builder . setLazyInit ( false ) ; builder . setInitMethodName ( "start" ) ; builder . setDestroyMethodName ( "stop" ) ; super . doParse ( element , builder ) ;
public class CholeskyOuterSolver_DDRB { /** * If X = = null then the solution is written into B . Otherwise the solution is copied * from B into X . */ @ Override public void solve ( DMatrixRBlock B , DMatrixRBlock X ) { } }
if ( B . blockLength != blockLength ) throw new IllegalArgumentException ( "Unexpected blocklength in B." ) ; DSubmatrixD1 L = new DSubmatrixD1 ( decomposer . getT ( null ) ) ; if ( X != null ) { if ( X . blockLength != blockLength ) throw new IllegalArgumentException ( "Unexpected blocklength in X." ) ; if ( X . numRows != L . col1 ) throw new IllegalArgumentException ( "Not enough rows in X" ) ; } if ( B . numRows != L . col1 ) throw new IllegalArgumentException ( "Not enough rows in B" ) ; // L * L ^ T * X = B // Solve for Y : L * Y = B TriangularSolver_DDRB . solve ( blockLength , false , L , new DSubmatrixD1 ( B ) , false ) ; // L ^ T * X = Y TriangularSolver_DDRB . solve ( blockLength , false , L , new DSubmatrixD1 ( B ) , true ) ; if ( X != null ) { // copy the solution from B into X MatrixOps_DDRB . extractAligned ( B , X ) ; }
public class RoleDefinitionsInner { /** * Deletes a role definition . * @ param scope The scope of the role definition . * @ param roleDefinitionId The ID of the role definition to delete . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the RoleDefinitionInner object */ public Observable < RoleDefinitionInner > deleteAsync ( String scope , String roleDefinitionId ) { } }
return deleteWithServiceResponseAsync ( scope , roleDefinitionId ) . map ( new Func1 < ServiceResponse < RoleDefinitionInner > , RoleDefinitionInner > ( ) { @ Override public RoleDefinitionInner call ( ServiceResponse < RoleDefinitionInner > response ) { return response . body ( ) ; } } ) ;
public class FFDC { /** * Dump an Object to a java . io . PrintWriter . * @ param Object to be dumped to printWriter * @ param java . io . PeritWriter to dump the Object to . */ static void print ( Object object , java . io . PrintWriter printWriter ) { } }
if ( object instanceof Printable ) { ( ( Printable ) object ) . print ( printWriter ) ; } else { printWriter . print ( object ) ; } // if ( object instanceof Printable ) .
public class DataService { /** * Method to upload the file for the given entity in asynchronous fashion * @ param entity * the entity * @ param docContent * the content of the file to upload * @ param callbackHandler * the callback handler * @ throws FMSException * throws FMSException */ public < T extends IEntity > void uploadAsync ( T entity , InputStream docContent , CallbackHandler callbackHandler ) throws FMSException { } }
IntuitMessage intuitMessage = prepareUpload ( entity , docContent ) ; // set callback handler intuitMessage . getRequestElements ( ) . setCallbackHandler ( callbackHandler ) ; // execute async interceptors executeAsyncInterceptors ( intuitMessage ) ;
public class KnownMimeTypes { /** * Gets a { @ link MediaType } for the extension of a file or url . * @ param extension the extension , without the " . " * @ return the parsed media type if known , { @ literal null } otherwise . * @ since 0.8.1 */ public static MediaType getMediaTypeByExtension ( String extension ) { } }
final String input = EXTENSIONS . get ( extension ) ; if ( input == null ) { return null ; } return MediaType . parse ( input ) ;
public class MultiVertexGeometryImpl { /** * Checked vs . Jan 11 , 2011 */ @ Override public Envelope1D queryInterval ( int semantics , int ordinate ) { } }
Envelope1D env = new Envelope1D ( ) ; if ( isEmptyImpl ( ) ) { env . setEmpty ( ) ; return env ; } _updateAllDirtyIntervals ( true ) ; return m_envelope . queryInterval ( semantics , ordinate ) ;
public class NodeImpl { /** * { @ inheritDoc } */ public void setPermissions ( Map permissions ) throws RepositoryException , AccessDeniedException , AccessControlException { } }
checkValid ( ) ; if ( ! isNodeType ( Constants . EXO_PRIVILEGEABLE ) ) { throw new AccessControlException ( "Node is not exo:privilegeable " + getPath ( ) ) ; } if ( permissions . size ( ) == 0 ) { throw new RepositoryException ( "Permission map size cannot be 0" ) ; } checkPermission ( PermissionType . CHANGE_PERMISSION ) ; List < AccessControlEntry > aces = new ArrayList < AccessControlEntry > ( ) ; for ( Iterator < String > i = permissions . keySet ( ) . iterator ( ) ; i . hasNext ( ) ; ) { String identity = i . next ( ) ; if ( identity == null ) { throw new RepositoryException ( "Identity cannot be null" ) ; } String [ ] perm = ( String [ ] ) permissions . get ( identity ) ; if ( perm == null ) { throw new RepositoryException ( "Permissions cannot be null" ) ; } for ( int j = 0 ; j < perm . length ; j ++ ) { AccessControlEntry ace = new AccessControlEntry ( identity , perm [ j ] ) ; aces . add ( ace ) ; } } AccessControlList acl = new AccessControlList ( getACL ( ) . getOwner ( ) , aces ) ; setACL ( acl ) ; updatePermissions ( acl ) ;
public class XRuntimeUtils { /** * Retrieves the path of the platform - dependent OpenXES * support folder . */ public static String getSupportFolder ( ) { } }
String homedir = System . getProperty ( "user.home" ) ; String dirName = "OpenXES" ; if ( isRunningWindows ( ) ) { // any windows flavor ( new File ( homedir + "\\" + dirName ) ) . mkdirs ( ) ; // create directories if necessary return homedir + "\\" + dirName + "\\" ; } else if ( isRunningMacOsX ( ) ) { // hey , it ' s a mac ! ( new File ( homedir + "/Library/Application Support/" + dirName ) ) . mkdirs ( ) ; return homedir + "/Library/Application Support/" + dirName + "/" ; } else { // most likely Linux or any other * NIX ( new File ( homedir + "/." + dirName ) ) . mkdirs ( ) ; // create directories if necessary return homedir + "/." + dirName + "/" ; }
public class StandardScheduler { /** * Registers pageables . * If the pageable has specified a page interval > 0 , it will be paged regularly . * @ param pageable */ public void register ( Pageable pageable ) { } }
System . out . println ( pagedSystemObjectNames ) ; if ( ! pagedSystemObjectNames . contains ( pageable . toString ( ) ) ) { if ( pageable . getPageIntervalInMinutes ( ) <= 0 ) { System . out . println ( new LogEntry ( Level . VERBOSE , "scheduler will not page " + StringSupport . trim ( pageable . toString ( ) + "'" , 80 , "..." ) + ": interval in minutes (" + pageable . getPageIntervalInMinutes ( ) + ") is not valid" ) ) ; } else { System . out . println ( new LogEntry ( Level . VERBOSE , "scheduler will page " + StringSupport . trim ( pageable . toString ( ) + "'" , 80 , "..." ) + " every " + pageable . getPageIntervalInMinutes ( ) + " minute(s)" ) ) ; } synchronized ( pagedSystems ) { pagedSystems . add ( pageable ) ; pagedSystemObjectNames . add ( pageable . toString ( ) ) ; } } else { System . out . println ( new LogEntry ( "pageable " + pageable + " already registered in scheduler" ) ) ; }
public class VictimsSQL { /** * Helper function to execute all pending patch operations in the given * { @ link PreparedStatement } s and close it . * @ param preparedStatements * @ throws SQLException */ protected void executeBatchAndClose ( PreparedStatement ... preparedStatements ) throws SQLException { } }
for ( PreparedStatement ps : preparedStatements ) { ps . executeBatch ( ) ; ps . clearBatch ( ) ; ps . close ( ) ; }
public class LocalXAResourceImpl { /** * { @ inheritDoc } */ public void rollback ( Xid xid ) throws XAException { } }
if ( ! xid . equals ( currentXid ) ) { throw new LocalXAException ( bundle . wrongXidInRollback ( currentXid , xid ) , XAException . XAER_PROTO ) ; } currentXid = null ; try { cl . getManagedConnection ( ) . getLocalTransaction ( ) . rollback ( ) ; } catch ( LocalResourceException lre ) { connectionManager . returnConnectionListener ( cl , true ) ; throw new LocalXAException ( bundle . couldNotRollbackLocalTx ( ) , XAException . XAER_RMFAIL , lre ) ; } catch ( ResourceException re ) { connectionManager . returnConnectionListener ( cl , true ) ; throw new LocalXAException ( bundle . couldNotRollbackLocalTx ( ) , XAException . XAER_RMERR , re ) ; }
public class ComputationGraph { /** * Calculate the score for each example in a DataSet individually . Unlike { @ link # score ( DataSet ) } and { @ link # score ( DataSet , boolean ) } * this method does not average / sum over examples . This method allows for examples to be scored individually ( at test time only ) , which * may be useful for example for autoencoder architectures and the like . < br > * Each row of the output ( assuming addRegularizationTerms = = true ) is equivalent to calling score ( DataSet ) with a single example . * @ param data The data to score * @ param addRegularizationTerms If true : add l1 / l2 regularization terms ( if any ) to the score . If false : don ' t add regularization terms * @ return An INDArray ( column vector ) of size input . numRows ( ) ; the ith entry is the score ( loss value ) of the ith example */ public INDArray scoreExamples ( DataSet data , boolean addRegularizationTerms ) { } }
if ( numInputArrays != 1 || numOutputArrays != 1 ) throw new UnsupportedOperationException ( "Cannot score ComputationGraph network with " + " DataSet: network does not have 1 input and 1 output arrays" ) ; return scoreExamples ( ComputationGraphUtil . toMultiDataSet ( data ) , addRegularizationTerms ) ;
public class PluginSkeleton { /** * Sets the name of the plugin and notifies * PropertyChangeListeners of the change . * @ param newName the name of the plugin to set . */ public void setName ( final String newName ) { } }
String oldName = this . name ; this . name = newName ; propertySupport . firePropertyChange ( "name" , oldName , this . name ) ;
public class UnderReplicatedBlocks { /** * Return the number of under replication blocks of priority */ synchronized int size ( int priority ) { } }
if ( priority < 0 || priority >= LEVEL ) { throw new IllegalArgumentException ( "Unsupported priority: " + priority ) ; } return priorityQueues . get ( priority ) . size ( ) ;
public class CalendarConverter { /** * < p > Obtains a suitable chronology for given locale . < / p > * < p > First the unicode - ca - extension of given locale is queried . If not available then ISO - 8601 will be * chosen . Otherwise , all available implementations of { @ link CalendarProvider } will be queried if they * can deliver a suitable chronology . < / p > * @ param locale the locale to be queried * @ return new bridge chronology for general calendar dates * @ throws IllegalArgumentException if the ca - extension of given locale does not point to any available calendar */ static Chronology < CalendarDate > getChronology ( Locale locale ) { } }
String name = locale . getUnicodeLocaleType ( "ca" ) ; if ( name == null ) { switch ( FormatUtils . getRegion ( locale ) ) { case "AF" : case "IR" : name = "persian" ; break ; case "SA" : name = "islamic-umalqura" ; break ; case "TH" : name = "buddhist" ; break ; default : return adapt ( PlainDate . axis ( ) , "" ) ; // ISO - 8601 as default } } else if ( name . equals ( "ethiopic-amete-alem" ) ) { name = "ethioaa" ; } else if ( name . equals ( "islamicc" ) ) { name = "islamic-civil" ; } else if ( name . equals ( "islamic" ) ) { name = "islamic-icu4j" ; // TODO : astro - variant } for ( CalendarProvider provider : ResourceLoader . getInstance ( ) . services ( CalendarProvider . class ) ) { Optional < Chronology < ? extends CalendarDate > > c = provider . findChronology ( name ) ; if ( c . isPresent ( ) ) { String calendarVariant = "" ; if ( name . equals ( "historic" ) ) { calendarVariant = ChronoHistory . of ( locale ) . getVariant ( ) ; } else if ( name . indexOf ( '-' ) > 0 ) { calendarVariant = name ; } return adapt ( c . get ( ) , calendarVariant ) ; } } throw new IllegalArgumentException ( "Could not find any calendar for: " + name ) ;
public class Cob2JaxbGenerator { /** * Given a COBOL copybook in a file , produce a set of java classes ( source * code ) used to convert mainframe data ( matching the copybook ) to JAXB * instances . * @ param cobolFile the COBOL copybook file * @ param cobolFileEncoding the COBOL copybook file character encoding * @ param targetFolder the target folder * @ param targetPackageName the java package the generated classes should * reside in * @ param xsltFileName an optional xslt to apply on the XML Schema */ public void generate ( File cobolFile , String cobolFileEncoding , File targetFolder , String targetPackageName , final String xsltFileName ) { } }
try { Reader reader = cobolFileEncoding == null ? new InputStreamReader ( new FileInputStream ( cobolFile ) ) : new InputStreamReader ( new FileInputStream ( cobolFile ) , cobolFileEncoding ) ; generate ( reader , targetFolder , targetPackageName , xsltFileName ) ; } catch ( UnsupportedEncodingException e ) { throw new Cob2JaxbGeneratorException ( e ) ; } catch ( FileNotFoundException e ) { throw new Cob2JaxbGeneratorException ( e ) ; }
public class JdkCompiler { /** * ( non - Javadoc ) * @ see com . baidu . bjf . remoting . protobuf . utils . compiler . AbstractCompiler # doCompile ( java . lang . String , * java . lang . String , java . io . OutputStream ) */ @ Override public synchronized Class < ? > doCompile ( String name , String sourceCode , OutputStream os ) throws Throwable { } }
if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Begin to compile source code: class is '{}'" , name ) ; } int i = name . lastIndexOf ( '.' ) ; String packageName = i < 0 ? "" : name . substring ( 0 , i ) ; String className = i < 0 ? name : name . substring ( i + 1 ) ; JavaFileObjectImpl javaFileObject = new JavaFileObjectImpl ( className , sourceCode ) ; javaFileManager . putFileForInput ( StandardLocation . SOURCE_PATH , packageName , className + ClassUtils . JAVA_EXTENSION , javaFileObject ) ; DiagnosticCollector < JavaFileObject > diagnosticCollector = new DiagnosticCollector < JavaFileObject > ( ) ; Boolean result = compiler . getTask ( null , javaFileManager , diagnosticCollector , options , null , Arrays . asList ( new JavaFileObject [ ] { javaFileObject } ) ) . call ( ) ; if ( result == null || ! result . booleanValue ( ) ) { throw new IllegalStateException ( "Compilation failed. class: " + name + ", diagnostics: " + diagnosticCollector . getDiagnostics ( ) ) ; } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "compile source code done: class is '{}'" , name ) ; LOGGER . debug ( "loading class '{}'" , name ) ; } Class < ? > retClass = classLoader . loadClass ( name ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "loading class done '{}'" , name ) ; } if ( os != null ) { byte [ ] bytes = classLoader . loadClassBytes ( name ) ; if ( bytes != null ) { os . write ( bytes ) ; os . flush ( ) ; } } return retClass ;
public class CmsWorkflowResources { /** * Returns true if there are too many resources . < p > * @ return true if there are too many resources */ public boolean isTooMany ( ) { } }
return ( m_tooManyCount != null ) || ( ( m_workflowResources != null ) && ( m_workflowResources . size ( ) > OpenCms . getWorkflowManager ( ) . getResourceLimit ( ) ) ) ;
public class Gram { /** * Deactivates all callback handlers . */ public static void deactivateAllCallbackHandlers ( ) { } }
synchronized ( callbackHandlers ) { Enumeration e = callbackHandlers . elements ( ) ; while ( e . hasMoreElements ( ) ) { CallbackHandler handler = ( CallbackHandler ) e . nextElement ( ) ; handler . shutdown ( ) ; } callbackHandlers . clear ( ) ; }
public class JPAFieldOperationsImpl { /** * Creates a One - to - One relationship * @ param project * @ param resource * @ param fieldName * @ param fieldType * @ param inverseFieldName * @ param fetchType * @ param required * @ param cascadeTypes * @ throws FileNotFoundException */ @ Override public void newOneToOneRelationship ( Project project , final JavaResource resource , final String fieldName , final String fieldType , final String inverseFieldName , final FetchType fetchType , final boolean required , final Iterable < CascadeType > cascadeTypes ) throws FileNotFoundException { } }
JavaSourceFacet java = project . getFacet ( JavaSourceFacet . class ) ; JavaClassSource entityClass = getJavaClassFrom ( resource ) ; JavaClassSource fieldEntityClass ; if ( areTypesSame ( fieldType , entityClass . getCanonicalName ( ) ) ) { fieldEntityClass = entityClass ; } else { fieldEntityClass = findEntity ( project , fieldType ) ; entityClass . addImport ( fieldEntityClass ) ; } FieldSource < JavaClassSource > localField = addFieldTo ( entityClass , fieldEntityClass . getName ( ) , fieldName , OneToOne . class . getName ( ) ) ; AnnotationSource < JavaClassSource > annotation = localField . getAnnotation ( OneToOne . class ) ; if ( ( inverseFieldName != null ) && ! inverseFieldName . isEmpty ( ) ) { FieldSource < JavaClassSource > inverseField = addFieldTo ( fieldEntityClass , entityClass . getName ( ) , inverseFieldName , OneToOne . class . getName ( ) ) ; inverseField . getAnnotation ( OneToOne . class ) . setStringValue ( "mappedBy" , localField . getName ( ) ) ; java . saveJavaSource ( fieldEntityClass ) ; } if ( fetchType != null && fetchType != FetchType . EAGER ) { annotation . setEnumValue ( "fetch" , fetchType ) ; } if ( required ) { // Set the optional attribute of @ OneToOne / @ ManyToOne only when false , since the default value is true annotation . setLiteralValue ( "optional" , "false" ) ; } addCascade ( cascadeTypes , annotation ) ; java . saveJavaSource ( entityClass ) ;
public class NettyServerBuilder { /** * Sets a custom keepalive timeout , the timeout for keepalive ping requests . An unreasonably small * value might be increased . * @ since 1.3.0 */ public NettyServerBuilder keepAliveTimeout ( long keepAliveTimeout , TimeUnit timeUnit ) { } }
checkArgument ( keepAliveTimeout > 0L , "keepalive timeout must be positive" ) ; keepAliveTimeoutInNanos = timeUnit . toNanos ( keepAliveTimeout ) ; keepAliveTimeoutInNanos = KeepAliveManager . clampKeepAliveTimeoutInNanos ( keepAliveTimeoutInNanos ) ; if ( keepAliveTimeoutInNanos < MIN_KEEPALIVE_TIMEOUT_NANO ) { // Bump keepalive timeout . keepAliveTimeoutInNanos = MIN_KEEPALIVE_TIMEOUT_NANO ; } return this ;
public class HtmlMessages { /** * < p > Return the value of the < code > fatalClass < / code > property . < / p > * < p > Contents : CSS style class to apply to any message * with a severity class of " FATAL " . */ public java . lang . String getFatalClass ( ) { } }
return ( java . lang . String ) getStateHelper ( ) . eval ( PropertyKeys . fatalClass ) ;
public class sdx_backup_restore { /** * < pre > * Use this operation to Backup . * < / pre > */ public static sdx_backup_restore backup ( nitro_service client ) throws Exception { } }
sdx_backup_restore resource = new sdx_backup_restore ( ) ; return ( ( sdx_backup_restore [ ] ) resource . perform_operation ( client , "backup" ) ) [ 0 ] ;
public class CompositeByteBuf {
    /**
     * Adds each buffer of {@code buffers} as a component starting at {@code cIndex}.
     * (No up-front sizing is attempted, but we do in the most common case that the
     * Iterable is a Collection.)
     */
    private CompositeByteBuf addComponents(boolean increaseIndex, int cIndex, Iterable<ByteBuf> buffers) { } }
if (buffers instanceof ByteBuf) {
    // If buffers also implements ByteBuf (e.g. CompositeByteBuf), it has to go to addComponent(ByteBuf).
    return addComponent(increaseIndex, cIndex, (ByteBuf) buffers);
}
checkNotNull(buffers, "buffers");
Iterator<ByteBuf> it = buffers.iterator();
try {
    checkComponentIndex(cIndex);
    // No need for consolidation
    while (it.hasNext()) {
        ByteBuf b = it.next();
        if (b == null) {
            // A null entry terminates the sequence; remaining buffers are released below.
            break;
        }
        // addComponent0 returns the index actually used; advance past it, but never
        // beyond the current component count.
        cIndex = addComponent0(increaseIndex, cIndex, b) + 1;
        cIndex = Math.min(cIndex, componentCount);
    }
} finally {
    // On early exit (null entry or exception) release the un-added buffers so their
    // reference counts are not leaked.
    while (it.hasNext()) {
        ReferenceCountUtil.safeRelease(it.next());
    }
}
consolidateIfNeeded();
return this;
public class MongoDBCollection { /** * Bulk update */ public QueryResult < BulkWriteResult > update ( List < DBObject > queries , List < DBObject > updates , QueryOptions options ) { } }
startQuery ( ) ; boolean upsert = false ; boolean multi = false ; if ( options != null ) { upsert = options . getBoolean ( UPSERT ) ; multi = options . getBoolean ( MULTI ) ; } BulkWriteResult wr = mongoDBNativeQuery . update ( queries , updates , upsert , multi ) ; QueryResult < BulkWriteResult > queryResult = endQuery ( Arrays . asList ( wr ) ) ; return queryResult ;
public class Scheduler { /** * We do not want a single failure to affect the whole scheduler . * @ param job job to execute . */ void executeInABox ( final Job job ) { } }
try { job . execute ( ) ; } catch ( Exception e ) { System . err . println ( "The execution of the job threw an exception" ) ; e . printStackTrace ( System . err ) ; }
public class JSONArray { /** * Get the optional object value associated with an index . * @ param index The index must be between 0 and length ( ) - 1. * @ return An object value , or null if there is no object at that index . */ public Object opt ( int index ) { } }
return ( index < 0 || index >= this . length ( ) ) ? null : this . list . get ( index ) ;
public class LibertyFeaturesToMavenRepo { /** * Generate POM file . * @ param feature The Liberty feature to generate POM for . * @ param featureCompileDependencies List of compile dependencies that the feature should provide . * @ param allFeatures The map of feature symbolic names to LibertyFeature objects . * @ param outputDir The root directory of the target Maven repository . * @ param type The type of artifact . * @ throws MavenRepoGeneratorException If the POM file could not be written . */ private void generatePom ( LibertyFeature feature , List < MavenCoordinates > featureCompileDependencies , Map < String , LibertyFeature > allFeatures , File outputDir , Constants . ArtifactType type ) throws MavenRepoGeneratorException { } }
MavenCoordinates coordinates = feature . getMavenCoordinates ( ) ; Model model = new Model ( ) ; model . setModelVersion ( Constants . MAVEN_MODEL_VERSION ) ; model . setGroupId ( coordinates . getGroupId ( ) ) ; model . setArtifactId ( coordinates . getArtifactId ( ) ) ; model . setVersion ( coordinates . getVersion ( ) ) ; model . setName ( feature . getName ( ) ) ; model . setDescription ( feature . getDescription ( ) ) ; model . setPackaging ( type . getType ( ) ) ; setLicense ( model , coordinates . getVersion ( ) , true , feature . isRestrictedLicense ( ) , Constants . WEBSPHERE_LIBERTY_FEATURES_GROUP_ID . equals ( coordinates . getGroupId ( ) ) ) ; boolean isWebsphereLiberty = Constants . WEBSPHERE_LIBERTY_FEATURES_GROUP_ID . equals ( coordinates . getGroupId ( ) ) ; if ( ! isWebsphereLiberty ) { setScmDevUrl ( model ) ; } List < Dependency > dependencies = new ArrayList < Dependency > ( ) ; model . setDependencies ( dependencies ) ; // ESA depends on other ESAs List < LibertyFeature > requiredFeatures = getRequiredFeatures ( feature , allFeatures ) ; if ( ! requiredFeatures . isEmpty ( ) ) { for ( LibertyFeature requiredFeature : requiredFeatures ) { MavenCoordinates requiredArtifact = requiredFeature . getMavenCoordinates ( ) ; addDependency ( dependencies , requiredArtifact , type , null ) ; } } if ( featureCompileDependencies != null ) { for ( MavenCoordinates requiredArtifact : featureCompileDependencies ) { addDependency ( dependencies , requiredArtifact , null , null ) ; } } File artifactDir = new File ( outputDir , Utils . getRepositorySubpath ( coordinates ) ) ; File targetFile = new File ( artifactDir , Utils . getFileName ( coordinates , Constants . ArtifactType . POM ) ) ; try { Writer writer = new FileWriter ( targetFile ) ; new MavenXpp3Writer ( ) . write ( writer , model ) ; writer . close ( ) ; } catch ( IOException e ) { throw new MavenRepoGeneratorException ( "Could not write POM file " + targetFile , e ) ; }
public class ForkJoinPool {
    /**
     * Tries to create and start a worker thread. On any failure (factory returned
     * null or threw), rolls back the worker bookkeeping via deregisterWorker.
     */
    private void addWorker() { } }
Throwable ex = null;
ForkJoinWorkerThread wt = null;
try {
    // Factory may return null or throw; both paths fall through to deregisterWorker.
    if ((wt = factory.newThread(this)) != null) {
        wt.start();
        return;
    }
} catch (Throwable e) {
    // Capture the failure cause so deregisterWorker can rethrow/record it.
    ex = e;
}
deregisterWorker(wt, ex); // adjust counts etc on failure
public class HttpComponentsClientHttpRequestFactory { /** * Apply the specified socket timeout to deprecated { @ link HttpClient } * implementations . See { @ link # setLegacyConnectionTimeout } . * @ param client the client to configure * @ param timeout the custom socket timeout * @ see # setLegacyConnectionTimeout */ @ SuppressWarnings ( "deprecation" ) private void setLegacySocketTimeout ( HttpClient client , int timeout ) { } }
if ( org . apache . http . impl . client . AbstractHttpClient . class . isInstance ( client ) ) { client . getParams ( ) . setIntParameter ( org . apache . http . params . CoreConnectionPNames . SO_TIMEOUT , timeout ) ; }
public class RoutesEndpoint { /** * Expose Zuul { @ link Route } information with details . * @ param format used to determine whether only locations or route details should be * provided * @ return a map of routes and their details */ @ ReadOperation public Object invokeRouteDetails ( @ Selector String format ) { } }
if ( FORMAT_DETAILS . equalsIgnoreCase ( format ) ) { return invokeRouteDetails ( ) ; } else { return invoke ( ) ; }
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcVertex ( ) { } }
if ( ifcVertexEClass == null ) { ifcVertexEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 750 ) ; } return ifcVertexEClass ;
public class ip6tunnelparam { /** * Use this API to fetch all the ip6tunnelparam resources that are configured on netscaler . */ public static ip6tunnelparam get ( nitro_service service ) throws Exception { } }
ip6tunnelparam obj = new ip6tunnelparam ( ) ; ip6tunnelparam [ ] response = ( ip6tunnelparam [ ] ) obj . get_resources ( service ) ; return response [ 0 ] ;
public class FctForSrhAccSecure { /** * < p > Get PrcRevealTaxCat ( create and put into map ) . < / p > * @ param pAddParam additional param * @ return requested PrcRevealTaxCat * @ throws Exception - an exception */ protected final PrcRevealTaxCat < RS > createPutPrcRevealTaxCat ( final Map < String , Object > pAddParam ) throws Exception { } }
PrcRevealTaxCat < RS > proc = new PrcRevealTaxCat < RS > ( ) ; proc . setLogger ( getLogger ( ) ) ; proc . setSrvDatabase ( getSrvDatabase ( ) ) ; // assigning fully initialized object : this . processorsMap . put ( PrcRevealTaxCat . class . getSimpleName ( ) , proc ) ; return proc ;
public class GeneralPath { /** * Create a filled path of the specified Java 2D Shape and color . * @ param shape Java 2D shape * @ param color the color to fill the shape with * @ return a new general path */ public static GeneralPath shapeOf ( Shape shape , Color color ) { } }
List < PathElement > elements = new ArrayList < PathElement > ( ) ; PathIterator pathIt = shape . getPathIterator ( new AffineTransform ( ) ) ; double [ ] data = new double [ 6 ] ; while ( ! pathIt . isDone ( ) ) { switch ( pathIt . currentSegment ( data ) ) { case PathIterator . SEG_MOVETO : elements . add ( new MoveTo ( data ) ) ; break ; case PathIterator . SEG_LINETO : elements . add ( new LineTo ( data ) ) ; break ; case PathIterator . SEG_CLOSE : elements . add ( new Close ( ) ) ; break ; case PathIterator . SEG_QUADTO : elements . add ( new QuadTo ( data ) ) ; break ; case PathIterator . SEG_CUBICTO : elements . add ( new CubicTo ( data ) ) ; break ; } pathIt . next ( ) ; } return new GeneralPath ( elements , color , pathIt . getWindingRule ( ) , 0d , true ) ;
public class Broker { /** * Creates an event queue ( thread safe ) with the correct binding . * @ param channel Name of event handled by queue . */ private synchronized void createChannel ( String channel ) { } }
if ( ! channelExists ( channel ) ) { Queue queue = new Queue ( channel , true , false , true ) ; admin . declareQueue ( queue ) ; Binding binding = new Binding ( channel , DestinationType . QUEUE , exchange . getName ( ) , channel + ".#" , null ) ; admin . declareBinding ( binding ) ; }
public class IndirectJndiLookupObjectFactory { /** * Try to obtain an object instance by creating a resource using a * ResourceFactory . * @ return the object instance , or null if an object could not be found */ private Object createResource ( String refName , String className , String bindingName , ResourceInfo resourceRefInfo ) throws Exception { } }
String nameFilter = FilterUtils . createPropertyFilter ( ResourceFactory . JNDI_NAME , bindingName ) ; String createsFilter = className == null ? null : FilterUtils . createPropertyFilter ( ResourceFactory . CREATES_OBJECT_CLASS , className ) ; String filter = createsFilter == null ? nameFilter : "(&" + nameFilter + createsFilter + ")" ; ResourceInfo resInfo = resourceRefInfo != null ? resourceRefInfo : className != null ? new ResourceEnvRefInfo ( refName , className ) : null ; return createResourceWithFilter ( filter , resInfo ) ;
public class sms_server { /** * < pre > * Converts API response of bulk operation into object and returns the object array in case of get request . * < / pre > */ protected base_resource [ ] get_nitro_bulk_response ( nitro_service service , String response ) throws Exception { } }
sms_server_responses result = ( sms_server_responses ) service . get_payload_formatter ( ) . string_to_resource ( sms_server_responses . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == SESSION_NOT_EXISTS ) service . clear_session ( ) ; throw new nitro_exception ( result . message , result . errorcode , ( base_response [ ] ) result . sms_server_response_array ) ; } sms_server [ ] result_sms_server = new sms_server [ result . sms_server_response_array . length ] ; for ( int i = 0 ; i < result . sms_server_response_array . length ; i ++ ) { result_sms_server [ i ] = result . sms_server_response_array [ i ] . sms_server [ 0 ] ; } return result_sms_server ;
public class DOMDifferenceEngine { /** * Separates XML namespace related attributes from " normal " attributes . xb */ private Attributes splitAttributes ( final NamedNodeMap map ) { } }
Attr sLoc = ( Attr ) map . getNamedItemNS ( XMLConstants . W3C_XML_SCHEMA_INSTANCE_NS_URI , "schemaLocation" ) ; Attr nNsLoc = ( Attr ) map . getNamedItemNS ( XMLConstants . W3C_XML_SCHEMA_INSTANCE_NS_URI , "noNamespaceSchemaLocation" ) ; Attr type = ( Attr ) map . getNamedItemNS ( XMLConstants . W3C_XML_SCHEMA_INSTANCE_NS_URI , "type" ) ; List < Attr > rest = new LinkedList < Attr > ( ) ; final int len = map . getLength ( ) ; for ( int i = 0 ; i < len ; i ++ ) { Attr a = ( Attr ) map . item ( i ) ; if ( ! XMLConstants . XMLNS_ATTRIBUTE_NS_URI . equals ( a . getNamespaceURI ( ) ) && a != sLoc && a != nNsLoc && a != type && getAttributeFilter ( ) . test ( a ) ) { rest . add ( a ) ; } } return new Attributes ( sLoc , nNsLoc , type , rest ) ;
public class CPDefinitionOptionValueRelPersistenceImpl { /** * Returns the cp definition option value rel where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found . Uses the finder cache . * @ param uuid the uuid * @ param groupId the group ID * @ return the matching cp definition option value rel , or < code > null < / code > if a matching cp definition option value rel could not be found */ @ Override public CPDefinitionOptionValueRel fetchByUUID_G ( String uuid , long groupId ) { } }
return fetchByUUID_G ( uuid , groupId , true ) ;
public class ProcessContext { /** * Sets the access control model which is used to determine authorized * subjects for activity execution . < br > * This method checks , if the access control model is compatible with * the log context , i . e . is contains all subjects and activities of the * log context . * @ param acModel An access control model . * @ param notifyListeners */ public void setACModel ( AbstractACModel < ? > acModel , boolean notifyListeners ) { } }
Validate . notNull ( acModel ) ; if ( this . acModel == acModel ) { return ; } validateACModel ( acModel ) ; this . acModel = acModel ; acModel . getContext ( ) . addContextListener ( this ) ; if ( notifyListeners ) { processContextListenerSupport . notifyACModelSet ( acModel ) ; }
public class ScopedRequestImpl { /** * Restores the map of request attributes from a map saved in the Session . * @ deprecated Moved the persisting of attributes out of the beehive NetUI * layer . Use { @ link # setAttributeMap } to set / merge the attributes . */ public void restoreAttributes ( ) { } }
String attrName = getScopedName ( STORED_ATTRS_ATTR ) ; Map savedAttrs = ( Map ) getSession ( ) . getAttribute ( attrName ) ; if ( savedAttrs != null ) { setAttributeMap ( savedAttrs ) ; }
public class GlucoseSyrup {
    /**
     * Initializes the additional parameters.
     */
    private void initializeGlucose() { } }
this.initializeGlucoseConfig();
// Fresh (empty) Glucose data structures.
this.watchesBin = new LNGVector<>();
this.permDiff = new LNGIntVector();
this.lastDecisionLevel = new LNGIntVector();
this.lbdQueue = new LNGBoundedLongQueue();
this.trailQueue = new LNGBoundedIntQueue();
this.assump = new LNGBooleanVector();
// Bounded queues are sized from the configured window lengths.
this.lbdQueue.initSize(sizeLBDQueue);
this.trailQueue.initSize(sizeTrailQueue);
// Reset all counters/statistics for a fresh solve.
this.myflag = 0;
this.analyzeBtLevel = 0;
this.analyzeLBD = 0;
this.analyzeSzWithoutSelectors = 0;
this.nbclausesbeforereduce = firstReduceDB;
this.conflicts = 0;
this.conflictsRestarts = 0;
this.sumLBD = 0;
this.curRestart = 1;
public class LoginController { /** * / * package - private */ URL parseLocalRefUrl ( final HttpServletRequest request , final String refUrl ) { } }
URL rslt = null ; // default if ( StringUtils . isNotBlank ( refUrl ) ) { try { final URL context = new URL ( request . getRequestURL ( ) . toString ( ) ) ; final URL location = new URL ( context , refUrl ) ; if ( location . getProtocol ( ) . equals ( context . getProtocol ( ) ) && location . getHost ( ) . equals ( context . getHost ( ) ) && location . getPort ( ) == context . getPort ( ) ) { rslt = location ; } else { log . warn ( "The specified refUrl is not local: " + refUrl ) ; } } catch ( Exception e ) { log . warn ( "Unable to analyze specified refUrl: " + refUrl ) ; log . debug ( e ) ; } } return rslt ;
public class LockSet { /** * Determine whether or not this lock set contains any locked values which * are method return values . * @ param factory * the ValueNumberFactory that produced the lock values */ public boolean containsReturnValue ( ValueNumberFactory factory ) { } }
for ( int i = 0 ; i + 1 < array . length ; i += 2 ) { int valueNumber = array [ i ] ; if ( valueNumber < 0 ) { break ; } int lockCount = array [ i + 1 ] ; if ( lockCount > 0 && factory . forNumber ( valueNumber ) . hasFlag ( ValueNumber . RETURN_VALUE ) ) { return true ; } } return false ;
public class LogUtils {
    /**
     * Returns the context node for a test from its log document.
     * Looks up starttest/context/value and converts its first attribute or
     * element child into a Saxon XdmNode; returns null when no context exists.
     */
    public static XdmNode getContextFromLog(net.sf.saxon.s9api.DocumentBuilder builder, Document log) throws Exception { } }
Element starttest = (Element) log.getElementsByTagName("starttest").item(0);
NodeList nl = starttest.getElementsByTagName("context");
if (nl == null || nl.getLength() == 0) {
    // No <context> recorded for this test.
    return null;
} else {
    Element context = (Element) nl.item(0);
    Element value = (Element) context.getElementsByTagName("value").item(0);
    nl = value.getChildNodes();
    for (int i = 0; i < nl.getLength(); i++) {
        Node n = nl.item(i);
        // NOTE(review): per the DOM spec, getChildNodes() never yields ATTRIBUTE_NODE
        // entries, so this branch looks unreachable — confirm intended source of attributes.
        if (n.getNodeType() == Node.ATTRIBUTE_NODE) {
            // Re-parse the serialized <value> element and return its first attribute.
            String s = DomUtils.serializeNode(value);
            XdmNode xn = builder.build(new StreamSource(new CharArrayReader(s.toCharArray())));
            return (XdmNode) xn.axisIterator(Axis.ATTRIBUTE).next();
        } else if (n.getNodeType() == Node.ELEMENT_NODE) {
            // Wrap the element child in a fresh document and hand it to Saxon.
            Document doc = DomUtils.createDocument(n);
            return builder.build(new DOMSource(doc));
        }
    }
}
return null;
public class CommsLightTrace { /** * Util to trace the System Message ID ( where available ) of a message along with the default toString */ public static String msgToString ( AbstractMessage message ) { } }
if ( message == null ) return STRING_NULL ; if ( message instanceof JsMessage ) return message + "[" + ( ( JsMessage ) message ) . getSystemMessageId ( ) + "]" ; return message . toString ( ) ;
public class PlaceController {
    /**
     * This is called by the location director when we are leaving this place and
     * need to clean up after ourselves and shutdown. Derived classes should
     * override this method (being sure to call <code>super.didLeavePlace</code>)
     * and perform any necessary cleanup.
     */
    public void didLeavePlace(final PlaceObject plobj) { } }
// let our delegates know what's up
applyToDelegates(new DelegateOp(PlaceControllerDelegate.class) {
    @Override
    public void apply(PlaceControllerDelegate delegate) {
        delegate.didLeavePlace(plobj);
    }
});
// let the UI hierarchy know that we're outta here
if (_view != null) {
    PlaceViewUtil.dispatchDidLeavePlace(_view, plobj);
    _ctx.clearPlaceView(_view);
    // Drop the view reference so it can be garbage collected.
    _view = null;
}
// Finally forget the place object itself.
_plobj = null;
public class MergeUtils { /** * Add topic id to the idMap . * @ param id topic id * @ return updated topic id */ public String addId ( final URI id ) { } }
if ( id == null ) { return null ; } final URI localId = id . normalize ( ) ; index ++ ; final String newId = PREFIX + Integer . toString ( index ) ; idMap . put ( localId , newId ) ; return newId ;
public class PolymerClassRewriter {
    /**
     * Rewrites a given call to Polymer({}) to a set of declarations and assignments
     * which can be understood by the compiler.
     *
     * @param exprRoot The root expression of the call to Polymer({}).
     * @param cls The extracted {@link PolymerClassDefinition} for the Polymer element
     *            created by this call.
     * @param isInGlobalScope whether the Polymer() call appears at global scope; affects
     *            where the generated type declarations are inserted.
     */
    void rewritePolymerCall(Node exprRoot, final PolymerClassDefinition cls, boolean isInGlobalScope) { } }
Node objLit = checkNotNull(cls.descriptor);
// Add {@code @lends} to the object literal.
JSDocInfoBuilder objLitDoc = new JSDocInfoBuilder(true);
JSTypeExpression jsTypeExpression = new JSTypeExpression(
    IR.string(cls.target.getQualifiedName() + ".prototype"), exprRoot.getSourceFileName());
objLitDoc.recordLends(jsTypeExpression);
objLit.setJSDocInfo(objLitDoc.build());
// Normalize the descriptor: typed functions, bracketed $-props, quoted listener keys.
addTypesToFunctions(objLit, cls.target.getQualifiedName(), cls.defType);
PolymerPassStaticUtils.switchDollarSignPropsToBrackets(objLit, compiler);
PolymerPassStaticUtils.quoteListenerAndHostAttributeKeys(objLit, compiler);
for (MemberDefinition prop : cls.props) {
    if (prop.value.isObjectLit()) {
        PolymerPassStaticUtils.switchDollarSignPropsToBrackets(prop.value, compiler);
    }
}
// For simplicity add everything into a block, before adding it to the AST.
Node block = IR.block();
JSDocInfoBuilder constructorDoc = this.getConstructorDoc(cls);
// Remove the original constructor JS docs from the objlit.
Node ctorKey = cls.constructor.value.getParent();
if (ctorKey != null) {
    ctorKey.removeProp(Node.JSDOC_INFO_PROP);
}
if (cls.target.isGetProp()) {
    // foo.bar = Polymer({ ... });
    Node assign = IR.assign(cls.target.cloneTree(), cls.constructor.value.cloneTree());
    NodeUtil.markNewScopesChanged(assign, compiler);
    assign.setJSDocInfo(constructorDoc.build());
    Node exprResult = IR.exprResult(assign);
    exprResult.useSourceInfoIfMissingFromForTree(cls.target);
    block.addChildToBack(exprResult);
} else {
    // var foo = Polymer({ ... }); OR Polymer({ ... });
    Node var = IR.var(cls.target.cloneTree(), cls.constructor.value.cloneTree());
    NodeUtil.markNewScopesChanged(var, compiler);
    var.useSourceInfoIfMissingFromForTree(exprRoot);
    var.setJSDocInfo(constructorDoc.build());
    block.addChildToBack(var);
}
// Append prototype properties, behavior members, and derived read-only/reflected props.
appendPropertiesToBlock(cls.props, block, cls.target.getQualifiedName() + ".prototype.");
appendBehaviorMembersToBlock(cls, block);
ImmutableList<MemberDefinition> readOnlyProps = parseReadOnlyProperties(cls, block);
ImmutableList<MemberDefinition> attributeReflectedProps = parseAttributeReflectedProperties(cls);
createExportsAndExterns(cls, readOnlyProps, attributeReflectedProps);
removePropertyDocs(objLit, PolymerClassDefinition.DefinitionType.ObjectLiteral);
Node statements = block.removeChildren();
Node parent = exprRoot.getParent();
// If the call to Polymer() is not in the global scope and the assignment target
// is not namespaced (which likely means it's exported to the global scope), put the type
// declaration into the global scope at the start of the current script.
// This avoids unknown type warnings which are a result of the compiler's poor understanding of
// types declared inside IIFEs or any non-global scope. We should revisit this decision as
// the typechecker's support for non-global types improves.
if (!isInGlobalScope && !cls.target.isGetProp()) {
    Node scriptNode = NodeUtil.getEnclosingScript(parent);
    scriptNode.addChildrenToFront(statements);
    compiler.reportChangeToChangeScope(scriptNode);
} else {
    Node beforeRoot = exprRoot.getPrevious();
    if (beforeRoot == null) {
        parent.addChildrenToFront(statements);
    } else {
        parent.addChildrenAfter(statements, beforeRoot);
    }
    compiler.reportChangeToEnclosingScope(parent);
}
compiler.reportChangeToEnclosingScope(statements);
// Since behavior files might contain language features that aren't present in the class file,
// we might need to update the FeatureSet.
if (cls.features != null) {
    Node scriptNode = NodeUtil.getEnclosingScript(parent);
    FeatureSet oldFeatures = (FeatureSet) scriptNode.getProp(Node.FEATURE_SET);
    FeatureSet newFeatures = oldFeatures.union(cls.features);
    if (!newFeatures.equals(oldFeatures)) {
        scriptNode.putProp(Node.FEATURE_SET, newFeatures);
        compiler.reportChangeToChangeScope(scriptNode);
    }
}
// A var declaration rewritten above must become a plain assignment at the original site.
if (NodeUtil.isNameDeclaration(exprRoot)) {
    Node assignExpr = varToAssign(exprRoot);
    parent.replaceChild(exprRoot, assignExpr);
    compiler.reportChangeToEnclosingScope(assignExpr);
}
// If property renaming is enabled, wrap the properties object literal
// in a reflection call so that the properties are renamed consistently
// with the class members.
if (polymerVersion > 1 && propertyRenamingEnabled && cls.descriptor != null) {
    Node props = NodeUtil.getFirstPropMatchingKey(cls.descriptor, "properties");
    if (props != null && props.isObjectLit()) {
        addPropertiesConfigObjectReflection(cls, props);
    }
}
public class XlsSaver {
    /**
     * Saves multiple objects, each to its own sheet.
     *
     * @param templateXlsIn input stream of the template Excel file
     * @param xlsOut output stream to write the workbook to
     * @param beanObjs the array of objects to write
     * @return the mapping result; when {@link Configuration#isIgnoreSheetNotFound()} is true
     *         and a sheet is missing, that entry is not included in the result
     * @throws IllegalArgumentException {@literal templateXlsIn == null or xlsOut == null or beanObjs == null}
     * @throws XlsMapperException if the mapping fails
     * @throws IOException if reading the template or writing the output fails
     */
    public MultipleSheetBindingErrors<Object> saveMultipleDetail(final InputStream templateXlsIn, final OutputStream xlsOut, final Object[] beanObjs) throws XlsMapperException, IOException { } }
ArgUtils.notNull(templateXlsIn, "templateXlsIn");
ArgUtils.notNull(xlsOut, "xlsOut");
ArgUtils.notEmpty(beanObjs, "beanObjs");
final AnnotationReader annoReader = new AnnotationReader(configuration.getAnnotationMapping().orElse(null));
final MultipleSheetBindingErrors<Object> multipleResult = new MultipleSheetBindingErrors<>();
// NOTE(review): the Workbook is never closed — confirm whether WorkbookFactory here requires it.
final Workbook book;
try {
    book = WorkbookFactory.create(templateXlsIn);
} catch (InvalidFormatException e) {
    throw new XlsMapperException(MessageBuilder.create("file.faiiLoadTemplateExcel.notSupportType").format(), e);
}
for (int i = 0; i < beanObjs.length; i++) {
    final Object beanObj = beanObjs[i];
    final Class<?> clazz = beanObj.getClass();
    // Each bean class must carry @XlsSheet to locate its target sheet.
    final XlsSheet sheetAnno = annoReader.getAnnotation(clazz, XlsSheet.class);
    if (sheetAnno == null) {
        // NOTE(review): sheetAnno is null here, so a null annotation is passed to the
        // exception constructor — confirm AnnotationInvalidException tolerates this.
        throw new AnnotationInvalidException(sheetAnno, MessageBuilder.create("anno.notFound").varWithClass("property", clazz).varWithAnno("anno", XlsSheet.class).format());
    }
    try {
        final Sheet[] xlsSheet = configuration.getSheetFinder().findForSaving(book, sheetAnno, annoReader, beanObj);
        // Only the first matching sheet is written for each bean.
        multipleResult.addBindingErrors(saveSheet(xlsSheet[0], beanObj, annoReader));
    } catch (SheetNotFoundException e) {
        if (configuration.isIgnoreSheetNotFound()) {
            // Missing sheet is tolerated: log and skip this bean.
            logger.warn(MessageBuilder.create("log.skipNotFoundSheet").format(), e);
            continue;
        } else {
            throw e;
        }
    }
}
if (configuration.isFormulaRecalcurationOnSave()) {
    // Force Excel to recalculate formulas when the file is next opened.
    book.setForceFormulaRecalculation(true);
}
book.write(xlsOut);
return multipleResult;
public class FileManagerImpl {
    /**
     * Outputs storage information stored on disk. Debugging interface.
     *
     * @param out the writer to dump the info into.
     */
    public void dump_disk_memory(Writer out) throws IOException { } }
long block_ptr;
long cache_end;
long magic;
int size;
long tail;
out.write("Information stored on disk\n");
// Header: magic number at the start-of-data location.
seek(STARTOFDATA_LOCATION);
magic = readLong();
out.write("File magic number: " + magic + "\n");
// Tail pointer block: tail, cached-free-list end, and the four sizing parameters.
seek(TAIL_DPTR);
tail = readLong();
out.write("Tail on disk: " + tail + "\n");
cache_end = readLong();
out.write("End of cached free list info: " + cache_end + "\n");
size = readInt();
out.write("First quick size: " + size + "\n");
size = readInt();
out.write("Last quick size: " + size + "\n");
size = readInt();
out.write("Grain size: " + size + "\n");
size = readInt();
out.write("Acceptable waste: " + size + "\n");
// Walk the block chain from the first tail to the current tail; a block's stored
// size is negative when free, so advance by its absolute value.
block_ptr = FIRST_TAIL;
while (block_ptr < tail) {
    seek(block_ptr);
    size = readInt();
    out.write(block_ptr + ", " + size + "\n");
    block_ptr += abs(size);
} ;
out.write("\n\n");
public class SelectPlan {
    /**
     * Returns a histogram that, for each field, approximates the distribution
     * of field values from the specified histogram satisfying the specified
     * predicate.
     * Assumes that:
     * <ul>
     * <li>Equality selection always finds matching records</li>
     * <li>Values in a bucket have the same frequency (uniform frequency)</li>
     * <li>Given values within two equal ranges (of two joinable fields), all
     * values in the range having smaller number of values appear in the range
     * having larger number of values</li>
     * <li>Distributions of values in different fields are independent with each
     * other</li>
     * </ul>
     *
     * @param hist the input join distribution of field values
     * @param pred the predicate
     * @return a histogram that, for each field, approximates the distribution
     *         of field values satisfying the predicate
     */
    public static Histogram predHistogram(Histogram hist, Predicate pred) { } }
// Fewer than one estimated output record: the selection yields an empty histogram.
if (Double.compare(hist.recordsOutput(), 1.0) < 0)
    return new Histogram(hist.fields());
// apply constant ranges
Map<String, ConstantRange> cRanges = new HashMap<String, ConstantRange>();
for (String fld : hist.fields()) {
    ConstantRange cr = pred.constantRange(fld);
    if (cr != null)
        cRanges.put(fld, cr);
}
Histogram crHist = constantRangeHistogram(hist, cRanges);
// apply field joins: process each equivalence group of join fields exactly once
Histogram jfHist = crHist;
Deque<String> flds = new LinkedList<String>(jfHist.fields());
while (!flds.isEmpty()) {
    String fld = flds.removeFirst();
    Set<String> group = pred.joinFields(fld);
    if (group != null) {
        // Remove the whole group from the worklist so it is not re-processed.
        flds.removeAll(group);
        group.add(fld);
        jfHist = joinFieldsHistogram(jfHist, group);
    }
}
return jfHist;
public class AntXmlReport { private List < TestCaseModel > buildModel ( List < AggregatedTestResultEvent > testEvents ) { } }
List < TestCaseModel > tests = new ArrayList < > ( ) ; for ( AggregatedTestResultEvent e : testEvents ) { TestCaseModel model = new TestCaseModel ( ) ; if ( e . getStatus ( ) == TestStatus . IGNORED || e . getStatus ( ) == TestStatus . IGNORED_ASSUMPTION ) { if ( mavenExtensions ) { // This emits an empty < skipped / > element . model . skipped = "" ; } else { // No way to report these in pure ANT XML . continue ; } } model . name = applyFilters ( e . getDescription ( ) . getMethodName ( ) ) ; model . classname = e . getDescription ( ) . getClassName ( ) ; model . time = e . getExecutionTime ( ) / 1000.0 ; for ( FailureMirror m : e . getFailures ( ) ) { if ( m . isAssumptionViolation ( ) ) { // Assumptions are not represented in ANT or Maven XMLs . continue ; } else if ( m . isAssertionViolation ( ) ) { model . failures . add ( buildModel ( m ) ) ; } else { model . errors . add ( buildModel ( m ) ) ; } } tests . add ( model ) ; } return tests ;
public class AtomWriter { /** * Start the XML stream document by defining things like the type of encoding , and prefixes used . It needs to be * used before calling any write method . * @ param os { @ link OutputStream } to write to . * @ throws ODataRenderException if unable to render the feed */ public void startDocument ( OutputStream os ) throws ODataRenderException { } }
try { outputStream = os ; xmlWriter = XML_OUTPUT_FACTORY . createXMLStreamWriter ( os , UTF_8 . name ( ) ) ; metadataWriter = new AtomMetadataWriter ( xmlWriter , oDataUri , entityDataModel , nsConfigurationProvider ) ; dataWriter = new AtomDataWriter ( xmlWriter , entityDataModel , nsConfigurationProvider ) ; xmlWriter . writeStartDocument ( UTF_8 . name ( ) , XML_VERSION ) ; xmlWriter . setPrefix ( METADATA , nsConfigurationProvider . getOdataMetadataNs ( ) ) ; xmlWriter . setPrefix ( ODATA_DATA , nsConfigurationProvider . getOdataDataNs ( ) ) ; } catch ( XMLStreamException e ) { LOG . error ( "Not possible to start stream XML" ) ; throw new ODataRenderException ( "Not possible to start stream XML: " , e ) ; }
public class N { /** * Note : Copied from Google Guava under Apache License v2.0 * < br / > * < br / > * If a thread is interrupted during such a call , the call continues to block until the result is available or the * timeout elapses , and only then re - interrupts the thread . * @ param cmd */ public static void runUninterruptibly ( final Try . Runnable < InterruptedException > cmd ) { } }
N . checkArgNotNull ( cmd ) ; boolean interrupted = false ; try { while ( true ) { try { cmd . run ( ) ; return ; } catch ( InterruptedException e ) { interrupted = true ; } } } finally { if ( interrupted ) { Thread . currentThread ( ) . interrupt ( ) ; } }
public class CrudProvider { /** * 列名判空处理 * @ param field * @ return boolean */ private boolean isNull ( Field field , Object obj ) { } }
try { if ( ! field . isAccessible ( ) ) { field . setAccessible ( true ) ; } return field . get ( obj ) == null ; } catch ( Exception e ) { e . printStackTrace ( ) ; } return false ;
public class RaidNode { /** * { @ inheritDoc } */ public void sendRecoveryTime ( String path , long recoveryTime , String taskId ) throws IOException { } }
this . blockIntegrityMonitor . sendRecoveryTime ( path , recoveryTime , taskId ) ;
public class TempByteHolder { /** * Truncates buffered data to specified size . Can not be used to extend data . * Repositions OutputStream at the end of truncated data . * If current read offset or mark is past the new end of data , it is moved at the new end . */ public void truncate ( int offset ) throws IOException { } }
if ( ( offset < 0 ) || ( offset > _write_pos ) ) throw new IOException ( "bad truncate offset" ) ; if ( _read_pos > offset ) _read_pos = offset ; if ( _mark_pos > offset ) _mark_pos = offset ; _write_pos = offset ; if ( _file_high > offset ) _file_high = offset ; moveWindow ( _write_pos ) ;
// Generated EMF accessor (@generated — do not hand-edit): lazily resolves the GSCD EClass
// from the registered AFPLIB package by its fixed classifier index (468) and caches it.
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getGSCD ( ) { } }
if ( gscdEClass == null ) { gscdEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 468 ) ; } return gscdEClass ;
public class TypedCodeGenerator { /** * Given a method definition node , returns the { @ link ObjectType } corresponding * to the class the method is defined on , or null if it is not a prototype method . */ private ObjectType findMethodOwner ( Node n ) { } }
if ( n == null ) { return null ; } Node parent = n . getParent ( ) ; FunctionType ctor = null ; if ( parent . isAssign ( ) ) { Node target = parent . getFirstChild ( ) ; if ( NodeUtil . isPrototypeProperty ( target ) ) { // TODO ( johnlenz ) : handle non - global types JSType type = registry . getGlobalType ( target . getFirstFirstChild ( ) . getQualifiedName ( ) ) ; ctor = type != null ? ( ( ObjectType ) type ) . getConstructor ( ) : null ; } } else if ( parent . isClass ( ) ) { // TODO ( sdh ) : test this case once the type checker understands ES6 classes ctor = parent . getJSType ( ) . toMaybeFunctionType ( ) ; } return ctor != null ? ctor . getInstanceType ( ) : null ;
public class Cluster { /** * Add the vectors of all documents to a composite vector . */ void set_composite_vector ( ) { } }
composite_ . clear ( ) ; for ( Document < K > document : documents_ ) { composite_ . add_vector ( document . feature ( ) ) ; }
public class Operations { /** * Create an { @ link Results index operation } instance that will use the supplied { @ link NavigableMap } ( provided by an index ) * and the { @ link Converter } to return all of the { @ link NodeKey } s that satisfy the given constraints . * @ param keysByValue the index ' s map of values - to - NodeKey ; may not be null * @ param converter the converter ; may not be null * @ param constraints the constraints ; may not be null but may be empty if there are no constraints * @ param variables the bound variables for this query ; may not be null but may be empty * @ return the index operation ; never null */ public static < T > FilterOperation createFilter ( NavigableMap < T , String > keysByValue , Converter < T > converter , Collection < Constraint > constraints , Map < String , Object > variables ) { } }
if ( keysByValue . isEmpty ( ) ) return EMPTY_FILTER_OPERATION ; NodeKeysAccessor < T , String > nodeKeysAccessor = new NodeKeysAccessor < T , String > ( ) { @ Override public Iterator < String > getNodeKeys ( NavigableMap < T , String > keysByValue ) { return keysByValue . values ( ) . iterator ( ) ; } @ Override public void addAllTo ( NavigableMap < T , String > keysByValue , Set < String > matchedKeys ) { matchedKeys . addAll ( keysByValue . values ( ) ) ; } } ; OperationBuilder < T > builder = new BasicOperationBuilder < > ( keysByValue , converter , nodeKeysAccessor , variables ) ; for ( Constraint constraint : constraints ) { OperationBuilder < T > newBuilder = builder . apply ( constraint , false ) ; if ( newBuilder != null ) builder = newBuilder ; } return builder ;
public class AbstractWComponent { /** * { @ inheritDoc } */ @ Override public String getAccessibleText ( ) { } }
ComponentModel model = getComponentModel ( ) ; return I18nUtilities . format ( null , model . getAccessibleText ( ) ) ;
public class ColumnMajorSparseMatrix { /** * Parses { @ link ColumnMajorSparseMatrix } from the given CSV string . * @ param csv the CSV string representing a matrix * @ return a parsed matrix */ public static ColumnMajorSparseMatrix fromCSV ( String csv ) { } }
return Matrix . fromCSV ( csv ) . to ( Matrices . SPARSE_COLUMN_MAJOR ) ;
public class Uris { /** * Creates a new URI based off the given string . This function differs from newUri in that it throws an * AssertionError instead of a URISyntaxException - so it is suitable for use in static locations as long as * you can be sure it is a valid string that is being parsed . * @ param url the string to parse * @ param strict whether or not to perform strict escaping . ( defaults to false ) * @ return the parsed , normalized URI */ public static URI createUri ( final String url , final boolean strict ) { } }
try { return newUri ( url , strict ) ; } catch ( URISyntaxException e ) { throw new AssertionError ( "Error creating URI: " + e . getMessage ( ) ) ; }
public class SimpleCookieCipher { /** * Initialize this component . < br > * This is basically called by DI setting file . */ @ PostConstruct public synchronized void initialize ( ) { } }
final FwWebDirection direction = assistWebDirection ( ) ; CookieResourceProvider provider = direction . assistCookieResourceProvider ( ) ; invertibleCipher = provider . provideCipher ( ) ; if ( invertibleCipher == null ) { final String msg = "No assist for the invertible cipher of cookie." ; throw new FwRequiredAssistNotFoundException ( msg ) ; } // no logging here because cookie manager do it
public class CmsToolBar { /** * Clears the left toolbar buttons . < p > */ public void clearButtonsLeft ( ) { } }
m_itemsLeft . removeAllComponents ( ) ; m_leftButtons . removeAllComponents ( ) ; // in case the app title is set , make sure to keep the label in the button bar if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( m_appIndicator . getValue ( ) ) ) { m_itemsLeft . addComponent ( m_appIndicator ) ; } updateFoldingThreshhold ( ) ;
// Serialization layout: UTF field name, then the flag bits, then (only when boost != 1.0f)
// the float boost, then the value. NOTE(review): the boost is written conditionally —
// presumably writeFlags() records whether a boost follows so the reader can mirror this;
// confirm against the corresponding read method before changing the layout.
public class ChangesHolder { /** * Serialize the Field into the given { @ link ObjectOutput } * @ param out the stream in which we serialize the Field * @ param field the Field instance to serialize * @ throws IOException if the Field could not be serialized */ private static void writeField ( ObjectOutput out , Fieldable field ) throws IOException { } }
// Name out . writeUTF ( field . name ( ) ) ; // Flags writeFlags ( out , field ) ; if ( field . getBoost ( ) != 1.0f ) { // Boost out . writeFloat ( field . getBoost ( ) ) ; } // Value writeValue ( out , field ) ;
public class CmsPopup { /** * Called on mouse up in the caption area , ends dragging by ending event * capture . * @ param event the mouse up event that ended dragging * @ see DOM # releaseCapture * @ see # beginDragging * @ see # endDragging */ protected void endDragging ( MouseUpEvent event ) { } }
m_dragging = false ; DOM . releaseCapture ( getElement ( ) ) ; removeStyleName ( I_CmsLayoutBundle . INSTANCE . dialogCss ( ) . dragging ( ) ) ;
public class Restarter { /** * Relaunch the application using the specified classloader . * @ param classLoader the classloader to use * @ return any exception that caused the launch to fail or { @ code null } * @ throws Exception in case of errors */ protected Throwable relaunch ( ClassLoader classLoader ) throws Exception { } }
RestartLauncher launcher = new RestartLauncher ( classLoader , this . mainClassName , this . args , this . exceptionHandler ) ; launcher . start ( ) ; launcher . join ( ) ; return launcher . getError ( ) ;
public class TaskTrack { /** * value : type + host + port * type : S / E ( start / end ) */ @ Override public byte [ ] getValue ( ) { } }
StringBuilder sb = new StringBuilder ( 32 ) ; if ( start != null ) { sb . append ( KVSerializable . START ) ; } else { sb . append ( KVSerializable . END ) ; } sb . append ( MetricUtils . AT ) . append ( host ) . append ( MetricUtils . AT ) . append ( port ) ; return sb . toString ( ) . getBytes ( ) ;
public class UrlParser { /** * work with parameters */ @ Nullable public String getQueryParameter ( @ NotNull String key ) { } }
if ( query == - 1 ) { return null ; } if ( queryPositions == null ) { parseQueryParameters ( ) ; } return findParameter ( raw , queryPositions , key ) ;
public class Update { /** * Static factory method to create an Update using the provided key * @ param key the field name for the update operation * @ param value the value to set for the field * @ return Updated object */ public static Update update ( String key , Object value ) { } }
return new Update ( ) . set ( key , value ) ;
// Axis-generated service locator (do not hand-edit): for the requested endpoint interface
// it constructs a SOAP binding stub bound to the configured endpoint address; any failure
// during stub construction — and an unsupported interface — is reported as ServiceException.
public class BudgetServiceLocator { /** * For the given interface , get the stub implementation . * If this service has no port for the given interface , * then ServiceException is thrown . */ public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } }
try { if ( com . google . api . ads . adwords . axis . v201809 . cm . BudgetServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) { com . google . api . ads . adwords . axis . v201809 . cm . BudgetServiceSoapBindingStub _stub = new com . google . api . ads . adwords . axis . v201809 . cm . BudgetServiceSoapBindingStub ( new java . net . URL ( BudgetServiceInterfacePort_address ) , this ) ; _stub . setPortName ( getBudgetServiceInterfacePortWSDDServiceName ( ) ) ; return _stub ; } } catch ( java . lang . Throwable t ) { throw new javax . xml . rpc . ServiceException ( t ) ; } throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ;
public class Settings { /** * Retrieves the H2 data directory - if the database has been moved to the * temp directory this method will return the temp directory . * @ return the data directory to store data files * @ throws java . io . IOException is thrown if an java . io . IOException occurs of * course . . . */ public File getH2DataDirectory ( ) throws IOException { } }
final String h2Test = getString ( Settings . KEYS . H2_DATA_DIRECTORY ) ; final File path ; if ( h2Test != null && ! h2Test . isEmpty ( ) ) { path = getDataFile ( Settings . KEYS . H2_DATA_DIRECTORY ) ; } else { path = getDataFile ( Settings . KEYS . DATA_DIRECTORY ) ; } if ( path != null && ( path . exists ( ) || path . mkdirs ( ) ) ) { return path ; } throw new IOException ( String . format ( "Unable to create the h2 data directory '%s'" , ( path == null ) ? "unknown" : path . getAbsolutePath ( ) ) ) ;
public class ViewQuery { /** * Helper method to properly encode a string . * This method can be overridden if a different encoding logic needs to be * used . If so , note that { @ link # keys ( JsonArray ) keys } is not encoded via * this method , but by the core . * @ param source source string . * @ return encoded target string . */ protected String encode ( final String source ) { } }
try { return URLEncoder . encode ( source , "UTF-8" ) ; } catch ( Exception ex ) { throw new RuntimeException ( "Could not prepare view argument: " + ex ) ; }
// NOTE(review): "connect" here only sends an Events.CONNECT datagram — UDP has no real
// connection, so success of the returned future only means the write was flushed locally,
// not that the server received it. The channel must already be bound (enforced below); the
// session is registered in CLIENTS under the channel's local address, presumably so that
// replies can be routed back to it — confirm against the receive path. The failure listener
// rethrows from operationComplete; presumably Netty logs exceptions thrown from listeners
// rather than propagating them — verify this is the intended failure signal.
public class NettyUDPClient { /** * This method will connect the datagram channel with the server and send * the { @ link Events # CONNECT } message to server . * @ param session * The session for which the datagram channel is being created . * @ param datagramChannel * The channel on which the message is to be sent to remote * server . * @ param serverAddress * The remote address of the server to which to send this * message . * @ param timeout * Amount of time to wait for the connection to happen . * < b > NOTE < / b > Since this is UDP there is actually no " real " * connection . * @ return Returns a ChannelFuture which can be used to check the success of * this operation . < b > NOTE < / b > Success in case of UDP means message * is sent to server . It does not mean that the server has received * it . * @ throws UnknownHostException */ public ChannelFuture connect ( Session session , DatagramChannel datagramChannel , InetSocketAddress serverAddress , int timeout , TimeUnit unit ) throws UnknownHostException , InterruptedException { } }
if ( null == datagramChannel ) { throw new NullPointerException ( "DatagramChannel passed to connect method cannot be null" ) ; } if ( ! datagramChannel . isBound ( ) ) { throw new IllegalStateException ( "DatagramChannel: " + datagramChannel + " Passed to connect method is not bound" ) ; } Event event = Events . event ( null , Events . CONNECT ) ; ChannelFuture future = datagramChannel . write ( event , serverAddress ) ; future . addListener ( new ChannelFutureListener ( ) { @ Override public void operationComplete ( ChannelFuture future ) throws Exception { if ( ! future . isSuccess ( ) ) { throw new RuntimeException ( future . getCause ( ) ) ; } } } ) ; CLIENTS . put ( datagramChannel . getLocalAddress ( ) , session ) ; return future ;
public class InitialMatching2 { /** * Sets a { @ link Consumer2 } to execute if this matches . */ public FluentMatchingC < T > then ( Consumer2 < A , B > consumer ) { } }
return then ( new FluentMatchingC < > ( value ) , consumer ) ;
public class TypePredicates { /** * Create a predicate to check that a type has a given name . * @ param name name on the type * @ return Predicate to check that a type has a given name . */ public static Predicate < Class < ? > > named ( final String name ) { } }
return clazz -> clazz . getName ( ) . equals ( name ) ;
public class Layer { /** * Internal method . Attach a Shape to the Layers Color Map */ final void attachShapeToColorMap ( final Shape < ? > shape ) { } }
if ( null != shape ) { String color = shape . getColorKey ( ) ; if ( null != color ) { m_shape_color_map . remove ( color ) ; shape . setColorKey ( null ) ; } int count = 0 ; do { count ++ ; color = m_c_rotor . next ( ) ; } while ( ( m_shape_color_map . get ( color ) != null ) && ( count <= ColorKeyRotor . COLOR_SPACE_MAXIMUM ) ) ; if ( count > ColorKeyRotor . COLOR_SPACE_MAXIMUM ) { throw new IllegalArgumentException ( "Exhausted color space " + count ) ; } m_shape_color_map . put ( color , shape ) ; shape . setColorKey ( color ) ; }
// Pushes a new RBBINode of the given type onto the scanner's parse-node stack and returns it.
// NOTE(review): on overflow this reports U_BRK_INTERNAL_ERROR but still falls through to the
// array store — safe only if error() throws (as in ICU4J, where it raises an exception);
// confirm that contract holds before reusing this code elsewhere.
public class RBBIRuleScanner { RBBINode pushNewNode ( int nodeType ) { } }
fNodeStackPtr ++ ; if ( fNodeStackPtr >= kStackSize ) { System . out . println ( "RBBIRuleScanner.pushNewNode - stack overflow." ) ; error ( RBBIRuleBuilder . U_BRK_INTERNAL_ERROR ) ; } fNodeStack [ fNodeStackPtr ] = new RBBINode ( nodeType ) ; return fNodeStack [ fNodeStackPtr ] ;
public class SqlContextFactoryImpl { /** * 定数クラスパラメータMap生成 * @ return 定数クラスパラメータMap */ private Map < ? extends String , ? extends Parameter > buildConstParamMap ( ) { } }
Map < String , Parameter > paramMap = new HashMap < > ( ) ; for ( String className : constantClassNames ) { if ( StringUtils . isNotBlank ( className ) ) { try { Class < ? > targetClass = Class . forName ( className , true , Thread . currentThread ( ) . getContextClassLoader ( ) ) ; makeConstParamMap ( paramMap , targetClass ) ; } catch ( ClassNotFoundException ex ) { LOG . error ( ex . getMessage ( ) , ex ) ; } } } return paramMap ;