signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SpiderPanel { /** * This method initializes the working Panel . * @ return javax . swing . JScrollPane */ @ Override protected JPanel getWorkPanel ( ) { } }
if ( mainPanel == null ) { mainPanel = new JPanel ( new BorderLayout ( ) ) ; tabbedPane = new JTabbedPane ( ) ; tabbedPane . addTab ( Constant . messages . getString ( "spider.panel.tab.urls" ) , getUrlsTableScrollPane ( ) ) ; tabbedPane . addTab ( Constant . messages . getString ( "spider.panel.tab.addednodes" ) , getAddedNodesTableScrollPane ( ) ) ; tabbedPane . addTab ( Constant . messages . getString ( "spider.panel.tab.messages" ) , getMessagesTableScrollPanel ( ) ) ; tabbedPane . setSelectedIndex ( 0 ) ; mainPanel . add ( tabbedPane ) ; } return mainPanel ;
public class JobOperations { /** * Gets the specified { @ link CloudJob } . * @ param jobId The ID of the job to get . * @ return A { @ link CloudJob } containing information about the specified Azure Batch job . * @ throws BatchErrorException Exception thrown when an error response is received from the Batch service . * @ throws IOException Exception thrown when there is an error in serialization / deserialization of data sent to / received from the Batch service . */ public CloudJob getJob ( String jobId ) throws BatchErrorException , IOException { } }
return getJob ( jobId , null , null ) ;
public class UtilAbstractAction { /** * Check for a confirmation id . This is a random string embedded * in some requests to confirm that the incoming request came from a page * we generated . Not all pages will have such an id but if we do it must * match . * We expect the request parameter to be of the form < br / > * confirmationid = id < p > . * @ param request Needed to locate session * @ param form * @ return String forward to here on error . null for OK . * @ throws Throwable */ protected String checkConfirmationId ( final HttpServletRequest request , final UtilActionForm form ) throws Throwable { } }
String reqpar = request . getParameter ( "confirmationid" ) ; if ( reqpar == null ) { return null ; } if ( ! reqpar . equals ( form . getConfirmationId ( ) ) ) { return "badConformationId" ; } return null ;
public class GSCCImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setCELLHI ( Integer newCELLHI ) { } }
Integer oldCELLHI = cellhi ; cellhi = newCELLHI ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . GSCC__CELLHI , oldCELLHI , cellhi ) ) ;
public class Binder { /** * Apply the chain of transforms and bind them to a virtual method specified * using the end signature plus the given class and name . The method will * be retrieved using the given Lookup and must match the end signature * exactly . * If the final handle ' s type does not exactly match the initial type for * this Binder , an additional cast will be attempted . * @ param lookup the MethodHandles . Lookup to use to look up the method * @ param name the name of the method to invoke * @ return the full handle chain , bound to the given method * @ throws java . lang . NoSuchMethodException if the method does not exist * @ throws java . lang . IllegalAccessException if the method is not accessible */ public MethodHandle invokeVirtual ( MethodHandles . Lookup lookup , String name ) throws NoSuchMethodException , IllegalAccessException { } }
return invoke ( lookup . findVirtual ( type ( ) . parameterType ( 0 ) , name , type ( ) . dropParameterTypes ( 0 , 1 ) ) ) ;
public class AttachmentManager { /** * Downloads the content of an { @ link com . taskadapter . redmineapi . bean . Attachment } from the Redmine server . * @ param issueAttachment the { @ link com . taskadapter . redmineapi . bean . Attachment } * @ return the content of the attachment as a byte [ ] array * @ throws RedmineCommunicationException thrown in case the download fails */ public byte [ ] downloadAttachmentContent ( Attachment issueAttachment ) throws RedmineException { } }
final ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; downloadAttachmentContent ( issueAttachment , baos ) ; try { baos . close ( ) ; } catch ( IOException e ) { throw new RedmineInternalError ( ) ; } return baos . toByteArray ( ) ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public FNCYftUnits createFNCYftUnitsFromString ( EDataType eDataType , String initialValue ) { } }
FNCYftUnits result = FNCYftUnits . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class NBTOutputStream { /** * Writes a { @ code TAG _ Compound } tag . * @ param tag The tag . * @ throws java . io . IOException if an I / O error occurs . */ private void writeCompoundTagPayload ( CompoundTag tag ) throws IOException { } }
for ( Tag < ? > childTag : tag . getValue ( ) . values ( ) ) { writeTag ( childTag ) ; } os . writeByte ( TagType . TAG_END . getId ( ) ) ; // end tag - better way ?
public class MetricMatcher { /** * Create a stream of TimeSeriesMetricDeltas with values . */ public Stream < Entry < MatchedName , MetricValue > > filter ( Context t ) { } }
return t . getTSData ( ) . getCurrentCollection ( ) . get ( this :: match , x -> true ) . stream ( ) . flatMap ( this :: filterMetricsInTsv ) ;
public class Preconditions { /** * Ensures that the given index is valid for an array , list or string of the given size . * @ param index index to check * @ param size size of the array , list or string * @ param errorMessage The message for the { @ code IndexOutOfBoundsException } that is thrown if the check fails . * @ throws IllegalArgumentException Thrown , if size is negative . * @ throws IndexOutOfBoundsException Thrown , if the index negative or greater than or equal to size */ public static void checkElementIndex ( int index , int size , @ Nullable String errorMessage ) { } }
checkArgument ( size >= 0 , "Size was negative." ) ; if ( index < 0 || index >= size ) { throw new IndexOutOfBoundsException ( String . valueOf ( errorMessage ) + " Index: " + index + ", Size: " + size ) ; }
public class OptimizationRequestHandler { /** * Find a solution of the linear ( equalities ) system A . x = b . * A is a pxn matrix , with rank ( A ) = p < n . * @ see " S . Boyd and L . Vandenberghe , Convex Optimization , p . 682" * NB : we are waiting for Csparsej to fix its qr decomposition issues . * @ TODO : sign this method with more restrictive class parameters */ protected DoubleMatrix1D findEqFeasiblePoint ( DoubleMatrix2D AMatrix , DoubleMatrix1D bVector ) throws Exception { } }
int p = AMatrix . rows ( ) ; int m = AMatrix . columns ( ) ; if ( m <= p ) { LogFactory . getLog ( this . getClass ( ) . getName ( ) ) . error ( "Equalities matrix A must be pxn with rank(A) = p < n" ) ; throw new RuntimeException ( "Equalities matrix A must be pxn with rank(A) = p < n" ) ; } if ( AMatrix instanceof SparseDoubleMatrix2D ) { QRSparseFactorization qr = new QRSparseFactorization ( ( SparseDoubleMatrix2D ) AMatrix ) ; qr . factorize ( ) ; DoubleMatrix1D x = qr . solve ( bVector ) ; return x ; } else { return findEqFeasiblePoint2 ( AMatrix , bVector ) ; }
public class Specimen { /** * syntactic sugar */ public SpecimenTreatmentComponent addTreatment ( ) { } }
SpecimenTreatmentComponent t = new SpecimenTreatmentComponent ( ) ; if ( this . treatment == null ) this . treatment = new ArrayList < SpecimenTreatmentComponent > ( ) ; this . treatment . add ( t ) ; return t ;
public class MethodCallUtil { /** * Creates a method that calls the passed method , injecting its parameters * using getters as necessary , and returns a string that invokes the new * method . The new method returns the passed method ' s return value , if any . * If a method without parameters is provided , that method will be called and * no parameters will be passed . If the passed method declared any checked * exceptions , the generated method will catch and rethrow those as * { @ link com . google . gwt . inject . client . CreationException } . * @ param method method to call ( can be constructor ) * @ param invokeeName expression that evaluates to the object on which * the method is to be called . If null the method will be called * in the current scope . * @ param parameterNames array with parameter names that can replace getter * methods ( usually used to fetch injected values ) in the returned * string . The array length must match the number of method * parameters . A { @ code null } value denotes that the getter method * should be used . * @ param nameGenerator NameGenerator to use for ensuring method name uniqueness * @ param methodsOutput a list where all new methods created by this * call are added * @ return string calling the generated method */ public SourceSnippet createMethodCallWithInjection ( MethodLiteral < ? , ? > method , String invokeeName , String [ ] parameterNames , NameGenerator nameGenerator , List < InjectorMethod > methodsOutput ) throws NoSourceNameException { } }
boolean hasInvokee = invokeeName != null ; boolean useNativeMethod = method . isPrivate ( ) || ReflectUtil . isPrivate ( method . getDeclaringType ( ) ) ; boolean isThrowing = hasCheckedExceptions ( method ) ; // Determine method signature parts . String invokeeTypeName = ReflectUtil . getSourceName ( method . getRawDeclaringType ( ) ) ; int invokerParamCount = method . getParameterTypes ( ) . size ( ) + ( hasInvokee ? 1 : 0 ) ; TypeLiteral < ? > returnType = method . getReturnType ( ) ; String returnTypeString = ReflectUtil . getSourceName ( returnType ) ; boolean returning = ! returnType . getRawType ( ) . equals ( Void . TYPE ) ; String invokerMethodName = getInvokerMethodName ( method , nameGenerator ) ; // The invoker method is placed in the fragment of the package that declares // the method , so it has access to the same package - private types as the // method declaration . String invokerPackageName ; if ( useNativeMethod && ! hasInvokee ) { // In this case , the type of the invokee is not mentioned by the type // signature of the invoker , and since we ' re using native code , we can // write a call to the target method even if the invokee is fully private . // This handles the case of a user statically injecting a private inner // class . // Pick a package somewhat arbitrarily ; since we ' re using native code , it // doesn ' t really matter where it goes . The declaring type ' s package is // easy to get to : invokerPackageName = method . getDeclaringType ( ) . getRawType ( ) . getPackage ( ) . getName ( ) ; } else { invokerPackageName = ReflectUtil . getUserPackageName ( method . getDeclaringType ( ) ) ; // TODO ( dburrows ) : won ' t this silently fail if some * parameters * to the // invokee have limited visibility ? Currently I believe that we ' ll just // generate noncompiling code . } methodsOutput . 
add ( createInvoker ( invokeeName , invokeeTypeName , hasInvokee , useNativeMethod , isThrowing , invokerMethodName , invokerPackageName , invokerParamCount , method , returnTypeString , returning , isLongAccess ( method ) ) ) ; return new InvokerCall ( hasInvokee , invokeeName , invokerMethodName , invokerPackageName , invokerParamCount , method , parameterNames ) ;
public class SoyValueConverter { /** * Returns a SoyValueProvider corresponding to a Java object , but doesn ' t perform any work until * resolve ( ) is called . */ private SoyValueProvider convertLazy ( @ Nullable final Object obj ) { } }
SoyValueProvider convertedPrimitive = convertCheap ( obj ) ; if ( convertedPrimitive != null ) { return convertedPrimitive ; } else { return new SoyAbstractCachingValueProvider ( ) { @ Override protected SoyValue compute ( ) { return convertNonPrimitive ( obj ) . resolve ( ) ; } @ Override public RenderResult status ( ) { return RenderResult . done ( ) ; } } ; }
public class JDBCLoader { /** * jdbcloader main . ( main is directly used by tests as well be sure to reset statics that you need to start over ) * @ param args * @ throws IOException * @ throws InterruptedException */ public static void main ( String [ ] args ) throws IOException , InterruptedException { } }
start = System . currentTimeMillis ( ) ; long insertTimeStart = start ; long insertTimeEnd ; final JDBCLoaderConfig cfg = new JDBCLoaderConfig ( ) ; cfg . parse ( JDBCLoader . class . getName ( ) , args ) ; FileReader fr = null ; BufferedReader br = null ; m_config = cfg ; configuration ( ) ; // Split server list final String [ ] serverlist = m_config . servers . split ( "," ) ; // read username and password from txt file if ( m_config . credentials != null && ! m_config . credentials . trim ( ) . isEmpty ( ) ) { Properties props = MiscUtils . readPropertiesFromCredentials ( m_config . credentials ) ; m_config . user = props . getProperty ( "username" ) ; m_config . password = props . getProperty ( "password" ) ; } // If we need to prompt the user for a VoltDB password , do so . m_config . password = CLIConfig . readPasswordIfNeeded ( m_config . user , m_config . password , "Enter VoltDB password: " ) ; // Create connection final ClientConfig c_config ; AutoReconnectListener listener = new AutoReconnectListener ( ) ; if ( m_config . stopondisconnect ) { c_config = new ClientConfig ( m_config . user , m_config . password , null ) ; c_config . setReconnectOnConnectionLoss ( false ) ; } else { c_config = new ClientConfig ( m_config . user , m_config . password , listener ) ; c_config . setReconnectOnConnectionLoss ( true ) ; } if ( m_config . ssl != null && ! m_config . ssl . trim ( ) . isEmpty ( ) ) { c_config . setTrustStoreConfigFromPropertyFile ( m_config . ssl ) ; c_config . enableSSL ( ) ; } c_config . setProcedureCallTimeout ( 0 ) ; // Set procedure all to infinite Client csvClient = null ; try { csvClient = JDBCLoader . getClient ( c_config , serverlist , m_config . port ) ; } catch ( Exception e ) { System . err . println ( "Error connecting to the servers: " + m_config . servers + ": " + e ) ; System . 
exit ( - 1 ) ; } assert ( csvClient != null ) ; try { long readerTime ; long insertCount ; long ackCount ; final JDBCLoader errHandler = new JDBCLoader ( ) ; final CSVDataLoader dataLoader ; errHandler . launchErrorFlushProcessor ( ) ; if ( m_config . useSuppliedProcedure ) { dataLoader = new CSVTupleDataLoader ( ( ClientImpl ) csvClient , m_config . procedure , errHandler ) ; } else { dataLoader = new CSVBulkDataLoader ( ( ClientImpl ) csvClient , m_config . table , m_config . batch , m_config . update , errHandler ) ; } if ( ! m_config . stopondisconnect ) { listener . setLoader ( dataLoader ) ; } // If we need to prompt the user for a JDBC datasource password , do so . m_config . jdbcpassword = CLIConfig . readPasswordIfNeeded ( m_config . jdbcuser , m_config . jdbcpassword , "Enter JDBC source database password: " ) ; // Created Source reader JDBCStatementReader . initializeReader ( cfg , csvClient ) ; JDBCStatementReader jdbcReader = new JDBCStatementReader ( dataLoader , errHandler ) ; Thread readerThread = new Thread ( jdbcReader ) ; readerThread . setName ( "JDBCSourceReader" ) ; readerThread . setDaemon ( true ) ; // Wait for reader to finish . readerThread . start ( ) ; readerThread . join ( ) ; insertTimeEnd = System . currentTimeMillis ( ) ; csvClient . close ( ) ; errHandler . waitForErrorFlushComplete ( ) ; readerTime = ( jdbcReader . m_parsingTime ) / 1000000 ; insertCount = dataLoader . getProcessedRows ( ) ; ackCount = insertCount - dataLoader . getFailedRows ( ) ; if ( errHandler . hasReachedErrorLimit ( ) ) { System . out . println ( "The number of failed rows exceeds the configured maximum failed rows: " + m_config . maxerrors ) ; } System . out . println ( "Read " + insertCount + " rows from file and successfully inserted " + ackCount + " rows (final)" ) ; errHandler . produceFiles ( ackCount , insertCount ) ; close_cleanup ( ) ; // In test junit mode we let it continue for reuse if ( ! JDBCLoader . testMode ) { System . exit ( errHandler . 
m_errorInfo . isEmpty ( ) ? 0 : - 1 ) ; } } catch ( Exception ex ) { System . err . println ( "Exception Happened while loading CSV data : " + ex ) ; System . exit ( 1 ) ; }
public class GetCSVHeaderResult { /** * The header information for the . csv file for the user import job . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCSVHeader ( java . util . Collection ) } or { @ link # withCSVHeader ( java . util . Collection ) } if you want to * override the existing values . * @ param cSVHeader * The header information for the . csv file for the user import job . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetCSVHeaderResult withCSVHeader ( String ... cSVHeader ) { } }
if ( this . cSVHeader == null ) { setCSVHeader ( new java . util . ArrayList < String > ( cSVHeader . length ) ) ; } for ( String ele : cSVHeader ) { this . cSVHeader . add ( ele ) ; } return this ;
public class FastMath { /** * Internal helper method for natural logarithm function . * @ param x original argument of the natural logarithm function * @ param hiPrec extra bits of precision on output ( To Be Confirmed ) * @ return log ( x ) */ private static double log ( final double x , final double [ ] hiPrec ) { } }
if ( x == 0 ) { // Handle special case of + 0 / - 0 return Double . NEGATIVE_INFINITY ; } long bits = Double . doubleToLongBits ( x ) ; /* Handle special cases of negative input , and NaN */ if ( ( bits & 0x8000000000000000L ) != 0 || x != x ) { if ( x != 0.0 ) { if ( hiPrec != null ) { hiPrec [ 0 ] = Double . NaN ; } return Double . NaN ; } } /* Handle special cases of Positive infinity . */ if ( x == Double . POSITIVE_INFINITY ) { if ( hiPrec != null ) { hiPrec [ 0 ] = Double . POSITIVE_INFINITY ; } return Double . POSITIVE_INFINITY ; } /* Extract the exponent */ int exp = ( int ) ( bits >> 52 ) - 1023 ; if ( ( bits & 0x7ff0000000000000L ) == 0 ) { // Subnormal ! if ( x == 0 ) { // Zero if ( hiPrec != null ) { hiPrec [ 0 ] = Double . NEGATIVE_INFINITY ; } return Double . NEGATIVE_INFINITY ; } /* Normalize the subnormal number . */ bits <<= 1 ; while ( ( bits & 0x0010000000000000L ) == 0 ) { -- exp ; bits <<= 1 ; } } if ( exp == - 1 || exp == 0 ) { if ( x < 1.01 && x > 0.99 && hiPrec == null ) { /* The normal method doesn ' t work well in the range [ 0.99 , 1.01 ] , so call do a straight polynomial expansion in higer precision . */ /* Compute x - 1.0 and split it */ double xa = x - 1.0 ; double xb = xa - x + 1.0 ; double tmp = xa * HEX_40000000 ; double aa = xa + tmp - tmp ; double ab = xa - aa ; xa = aa ; xb = ab ; final double [ ] lnCoef_last = LN_QUICK_COEF [ LN_QUICK_COEF . length - 1 ] ; double ya = lnCoef_last [ 0 ] ; double yb = lnCoef_last [ 1 ] ; for ( int i = LN_QUICK_COEF . 
length - 2 ; i >= 0 ; i -- ) { /* Multiply a = y * x */ aa = ya * xa ; ab = ya * xb + yb * xa + yb * xb ; /* split , so now y = a */ tmp = aa * HEX_40000000 ; ya = aa + tmp - tmp ; yb = aa - ya + ab ; /* Add a = y + lnQuickCoef */ final double [ ] lnCoef_i = LN_QUICK_COEF [ i ] ; aa = ya + lnCoef_i [ 0 ] ; ab = yb + lnCoef_i [ 1 ] ; /* Split y = a */ tmp = aa * HEX_40000000 ; ya = aa + tmp - tmp ; yb = aa - ya + ab ; } /* Multiply a = y * x */ aa = ya * xa ; ab = ya * xb + yb * xa + yb * xb ; /* split , so now y = a */ tmp = aa * HEX_40000000 ; ya = aa + tmp - tmp ; yb = aa - ya + ab ; return ya + yb ; } } // lnm is a log of a number in the range of 1.0 - 2.0 , so 0 < = lnm < ln ( 2) final double [ ] lnm = lnMant . LN_MANT [ ( int ) ( ( bits & 0x000ffc0000000000L ) >> 42 ) ] ; /* double epsilon = x / Double . longBitsToDouble ( bits & 0xfffffc00000L ) ; epsilon - = 1.0; */ // y is the most significant 10 bits of the mantissa // double y = Double . longBitsToDouble ( bits & 0xfffffc00000L ) ; // double epsilon = ( x - y ) / y ; final double epsilon = ( bits & 0x3ffffffffffL ) / ( TWO_POWER_52 + ( bits & 0x000ffc0000000000L ) ) ; double lnza = 0.0 ; double lnzb = 0.0 ; if ( hiPrec != null ) { /* split epsilon - > x */ double tmp = epsilon * HEX_40000000 ; double aa = epsilon + tmp - tmp ; double ab = epsilon - aa ; double xa = aa ; double xb = ab ; /* Need a more accurate epsilon , so adjust the division . */ final double numer = bits & 0x3ffffffffffL ; final double denom = TWO_POWER_52 + ( bits & 0x000ffc0000000000L ) ; aa = numer - xa * denom - xb * denom ; xb += aa / denom ; /* Remez polynomial evaluation */ final double [ ] lnCoef_last = LN_HI_PREC_COEF [ LN_HI_PREC_COEF . length - 1 ] ; double ya = lnCoef_last [ 0 ] ; double yb = lnCoef_last [ 1 ] ; for ( int i = LN_HI_PREC_COEF . 
length - 2 ; i >= 0 ; i -- ) { /* Multiply a = y * x */ aa = ya * xa ; ab = ya * xb + yb * xa + yb * xb ; /* split , so now y = a */ tmp = aa * HEX_40000000 ; ya = aa + tmp - tmp ; yb = aa - ya + ab ; /* Add a = y + lnHiPrecCoef */ final double [ ] lnCoef_i = LN_HI_PREC_COEF [ i ] ; aa = ya + lnCoef_i [ 0 ] ; ab = yb + lnCoef_i [ 1 ] ; /* Split y = a */ tmp = aa * HEX_40000000 ; ya = aa + tmp - tmp ; yb = aa - ya + ab ; } /* Multiply a = y * x */ aa = ya * xa ; ab = ya * xb + yb * xa + yb * xb ; /* split , so now lnz = a */ /* tmp = aa * 1073741824.0; lnza = aa + tmp - tmp ; lnzb = aa - lnza + ab ; */ lnza = aa + ab ; lnzb = - ( lnza - aa - ab ) ; } else { /* High precision not required . Eval Remez polynomial using standard double precision */ lnza = - 0.16624882440418567 ; lnza = lnza * epsilon + 0.19999954120254515 ; lnza = lnza * epsilon + - 0.2499999997677497 ; lnza = lnza * epsilon + 0.3333333333332802 ; lnza = lnza * epsilon + - 0.5 ; lnza = lnza * epsilon + 1.0 ; lnza = lnza * epsilon ; } /* Relative sizes : * lnzb [ 0 , 2.33E - 10] * lnm [ 1 ] [ 0 , 1.17E - 7] * ln2B * exp [ 0 , 1.12E - 4] * lnza [ 0 , 9.7E - 4] * lnm [ 0 ] [ 0 , 0.692] * ln2A * exp [ 0 , 709] */ /* Compute the following sum : * lnzb + lnm [ 1 ] + ln2B * exp + lnza + lnm [ 0 ] + ln2A * exp ; */ // return lnzb + lnm [ 1 ] + ln2B * exp + lnza + lnm [ 0 ] + ln2A * exp ; double a = LN_2_A * exp ; double b = 0.0 ; double c = a + lnm [ 0 ] ; double d = - ( c - a - lnm [ 0 ] ) ; a = c ; b = b + d ; c = a + lnza ; d = - ( c - a - lnza ) ; a = c ; b = b + d ; c = a + LN_2_B * exp ; d = - ( c - a - LN_2_B * exp ) ; a = c ; b = b + d ; c = a + lnm [ 1 ] ; d = - ( c - a - lnm [ 1 ] ) ; a = c ; b = b + d ; c = a + lnzb ; d = - ( c - a - lnzb ) ; a = c ; b = b + d ; if ( hiPrec != null ) { hiPrec [ 0 ] = a ; hiPrec [ 1 ] = b ; } return a + b ;
public class JDBC4ResultSet { /** * Moves the cursor forward one row from its current position . */ @ Override public boolean next ( ) throws SQLException { } }
checkClosed ( ) ; if ( cursorPosition == Position . afterLast || table . getActiveRowIndex ( ) == rowCount - 1 ) { cursorPosition = Position . afterLast ; return false ; } if ( cursorPosition == Position . beforeFirst ) { cursorPosition = Position . middle ; } try { return table . advanceRow ( ) ; } catch ( Exception x ) { throw SQLError . get ( x ) ; }
public class LCharIntConsumerBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static LCharIntConsumer charIntConsumerFrom ( Consumer < LCharIntConsumerBuilder > buildingFunction ) { } }
LCharIntConsumerBuilder builder = new LCharIntConsumerBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class Config { /** * Sets the map of reliable topic configurations , mapped by config name . * The config name may be a pattern with which the configuration will be * obtained in the future . * @ param reliableTopicConfigs the reliable topic configuration map to set * @ return this config instance */ public Config setReliableTopicConfigs ( Map < String , ReliableTopicConfig > reliableTopicConfigs ) { } }
this . reliableTopicConfigs . clear ( ) ; this . reliableTopicConfigs . putAll ( reliableTopicConfigs ) ; for ( Entry < String , ReliableTopicConfig > entry : reliableTopicConfigs . entrySet ( ) ) { entry . getValue ( ) . setName ( entry . getKey ( ) ) ; } return this ;
public class TempByteHolder { /** * Returns InputSream for reading buffered data . * @ return InputSream for reading buffered data . */ public java . io . InputStream getInputStream ( ) { } }
if ( _input_stream == null ) { _input_stream = new TempByteHolder . InputStream ( ) ; } return _input_stream ;
public class JSONConverter { /** * Encode a NotificationSettings instance as JSON : * " deliveryInterval " : Integer * @ param out The stream to write JSON to * @ param value The NotificationSettings instance to encode . Can ' t be null . * @ throws IOException If an I / O error occurs * @ see # readNotificationSettings ( InputStream ) */ public void writeNotificationSettings ( OutputStream out , NotificationSettings value ) throws IOException { } }
writeStartObject ( out ) ; writeIntField ( out , OM_DELIVERYINTERVAL , value . deliveryInterval ) ; writeIntField ( out , OM_INBOXEXPIRTY , value . inboxExpiry ) ; writeEndObject ( out ) ;
public class ServletTimerImpl { /** * Helper to calculate when next execution time is . */ private void estimateNextExecution ( ) { } }
synchronized ( TIMER_LOCK ) { if ( fixedDelay ) { scheduledExecutionTime = period + System . currentTimeMillis ( ) ; } else { if ( firstExecution == 0 ) { // save timestamp of first execution firstExecution = scheduledExecutionTime ; } long now = System . currentTimeMillis ( ) ; long executedTime = ( numInvocations ++ * period ) ; scheduledExecutionTime = firstExecution + executedTime ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "next execution estimated to run at " + scheduledExecutionTime ) ; } if ( logger . isDebugEnabled ( ) ) { logger . debug ( "current time is " + now ) ; } } }
public class NumericPrompt { /** * Parses a number that matches the text , null if no matches * @ param text Text to be parsed as a number * @ return Number parsed , null if no match * @ see NumericPrompt # INTEGER _ PATTERN * @ see NumericPrompt # DOUBLE _ PATTERN * @ see NumericPrompt # FLOAT _ PATTERN */ protected Number parseNumber ( String text ) { } }
if ( INTEGER_PATTERN . matcher ( text ) . matches ( ) ) { return Integer . parseInt ( text ) ; } if ( DOUBLE_PATTERN . matcher ( text ) . matches ( ) ) { return Double . parseDouble ( text ) ; } if ( FLOAT_PATTERN . matcher ( text ) . matches ( ) ) { return Float . parseFloat ( text ) ; } return null ;
public class StringHelper { /** * Optimized replace method that replaces a set of characters with a set of * strings . This method was created for efficient XML special character * replacements ! * @ param sInputString * The input string . * @ param aSearchChars * The characters to replace . * @ param aReplacementStrings * The new strings to be inserted instead . Must have the same array * length as aPatterns . * @ return The replaced version of the string or an empty char array if the * input string was < code > null < / code > . */ @ Nonnull public static char [ ] replaceMultiple ( @ Nullable final String sInputString , @ Nonnull final char [ ] aSearchChars , @ Nonnull final char [ ] [ ] aReplacementStrings ) { } }
// Any input text ? if ( hasNoText ( sInputString ) ) return ArrayHelper . EMPTY_CHAR_ARRAY ; return replaceMultiple ( sInputString . toCharArray ( ) , aSearchChars , aReplacementStrings ) ;
public class SimplePlaylistController { /** * { @ inheritDoc } */ public int previousItem ( IPlaylist playlist , int itemIndex ) { } }
if ( itemIndex > playlist . getItemSize ( ) ) { return playlist . getItemSize ( ) - 1 ; } if ( playlist . isRepeat ( ) ) { return itemIndex ; } if ( playlist . isRandom ( ) ) { int lastIndex = itemIndex ; // continuously generate a random number // until you get one that was not the last . . . while ( itemIndex == lastIndex ) { itemIndex = rand . nextInt ( playlist . getItemSize ( ) ) ; } lastIndex = itemIndex ; return itemIndex ; } int prevIndex = itemIndex - 1 ; if ( prevIndex >= 0 ) { return prevIndex ; } else if ( playlist . isRewind ( ) ) { return playlist . getItemSize ( ) - 1 ; } else { return - 1 ; }
public class Timestamps { /** * Obtain the current time from the unix epoch * @ param epochMillis gives the current time in milliseconds since since the epoch * @ return a { @ code Timestamp } corresponding to the ticker ' s current value */ public static Timestamp fromEpoch ( long epochMillis ) { } }
return Timestamp . newBuilder ( ) . setNanos ( ( int ) ( ( epochMillis % MILLIS_PER_SECOND ) * NANOS_PER_MILLI ) ) . setSeconds ( epochMillis / MILLIS_PER_SECOND ) . build ( ) ;
public class SimpleJob { /** * Default job settings . */ private void setup ( ) { } }
super . setMapperClass ( Mapper . class ) ; super . setMapOutputKeyClass ( Key . class ) ; super . setMapOutputValueClass ( Value . class ) ; super . setPartitionerClass ( SimplePartitioner . class ) ; super . setGroupingComparatorClass ( SimpleGroupingComparator . class ) ; super . setSortComparatorClass ( SimpleSortComparator . class ) ; super . setReducerClass ( Reducer . class ) ; super . setOutputKeyClass ( Key . class ) ; super . setOutputValueClass ( Value . class ) ;
public class UTF16 { /** * Adds a codepoint to offset16 position of the argument char array . * @ param target Char array to be append with the new code point * @ param limit UTF16 offset which the codepoint will be appended . * @ param char32 Code point to be appended * @ return offset after char32 in the array . * @ exception IllegalArgumentException Thrown if there is not enough space for the append , or when char32 does not * lie within the range of the Unicode codepoints . */ public static int append ( char [ ] target , int limit , int char32 ) { } }
// Check for irregular values if ( char32 < CODEPOINT_MIN_VALUE || char32 > CODEPOINT_MAX_VALUE ) { throw new IllegalArgumentException ( "Illegal codepoint" ) ; } // Write the UTF - 16 values if ( char32 >= SUPPLEMENTARY_MIN_VALUE ) { target [ limit ++ ] = getLeadSurrogate ( char32 ) ; target [ limit ++ ] = getTrailSurrogate ( char32 ) ; } else { target [ limit ++ ] = ( char ) char32 ; } return limit ;
public class ClusterClientOptions { /** * Returns a new { @ link ClusterClientOptions . Builder } initialized from { @ link ClientOptions } to construct * { @ link ClusterClientOptions } . * @ return a new { @ link ClusterClientOptions . Builder } to construct { @ link ClusterClientOptions } . * @ since 5.1.6 */ public static ClusterClientOptions . Builder builder ( ClientOptions clientOptions ) { } }
LettuceAssert . notNull ( clientOptions , "ClientOptions must not be null" ) ; if ( clientOptions instanceof ClusterClientOptions ) { return ( ( ClusterClientOptions ) clientOptions ) . mutate ( ) ; } Builder builder = new Builder ( ) ; builder . autoReconnect ( clientOptions . isAutoReconnect ( ) ) . bufferUsageRatio ( clientOptions . getBufferUsageRatio ( ) ) . cancelCommandsOnReconnectFailure ( clientOptions . isCancelCommandsOnReconnectFailure ( ) ) . disconnectedBehavior ( clientOptions . getDisconnectedBehavior ( ) ) . publishOnScheduler ( clientOptions . isPublishOnScheduler ( ) ) . pingBeforeActivateConnection ( clientOptions . isPingBeforeActivateConnection ( ) ) . requestQueueSize ( clientOptions . getRequestQueueSize ( ) ) . socketOptions ( clientOptions . getSocketOptions ( ) ) . sslOptions ( clientOptions . getSslOptions ( ) ) . suspendReconnectOnProtocolFailure ( clientOptions . isSuspendReconnectOnProtocolFailure ( ) ) . timeoutOptions ( clientOptions . getTimeoutOptions ( ) ) ; return builder ;
public class ActionRequestProcessor { protected void toNext ( ActionRuntime runtime , NextJourney journey ) throws IOException , ServletException { } }
if ( journey . hasJourneyProvider ( ) ) { // e . g . HTML / JSON response journey . getJourneyProvider ( ) . bonVoyage ( ) ; } if ( journey . hasViewRouting ( ) ) { // basically HTML response if ( journey . isRedirectTo ( ) ) { doRedirect ( runtime , journey ) ; } else { final HtmlRenderer renderer = prepareHtmlRenderer ( runtime , journey ) ; renderer . render ( getRequestManager ( ) , runtime , journey ) ; } } // do nothing if undefined
public class SahaginMain {
    /**
     * Command-line entry point.
     * <p>
     * The first argument is the action name (currently "report" only); the
     * optional second argument is the configuration file path, defaulting to
     * "sahagin.yml" in the working directory.
     *
     * @param args command-line arguments as described above
     * @throws IllegalArgumentException if no action is given, the action is
     *         unknown, or the configuration file does not exist
     */
    public static void main(String[] args) throws YamlConvertException, IllegalDataStructureException, IllegalTestScriptException {
        if (args.length == 0) {
            throw new IllegalArgumentException(MSG_NO_COMMAND_LINE_ARGUMENT);
        }
        Action action = null;
        try {
            action = Action.getEnum(args[0]);
        } catch (IllegalArgumentException e) {
            // re-throw with a user-facing message naming the unknown action
            throw new IllegalArgumentException(String.format(MSG_UNKNOWN_ACTION, args[0]));
        }
        String configFilePath;
        if (args.length <= 1) {
            // no explicit path supplied: fall back to the default file name
            configFilePath = "sahagin.yml";
        } else {
            configFilePath = args[1];
        }
        File configFile = new File(configFilePath);
        if (!configFile.exists()) {
            throw new IllegalArgumentException(String.format(MSG_CONFIG_NOT_FOUND, configFile.getAbsolutePath()));
        }
        Config config = Config.generateFromYamlConfig(configFile);
        Logging.setLoggerEnabled(config.isOutputLog());
        // localized messages must be initialized before the action runs
        AcceptableLocales locales = AcceptableLocales.getInstance(config.getUserLocale());
        SysMessages.globalInitialize(locales);
        switch (action) {
        case Report:
            report(config);
            break;
        default:
            // unreachable unless a new Action value is added without handling here
            throw new RuntimeException("implementation error");
        }
    }
}
public class Main {
    /**
     * Prints a message reporting a fatal error. When the "dev" option is set and
     * the error has a cause, the cause's stack trace is printed as well for
     * diagnosis.
     *
     * @param ex the fatal error to report
     * @param options compiler options; "dev" enables the cause's stack trace
     */
    void feMessage(Throwable ex, Options options) {
        log.printRawLines(ex.getMessage());
        if (ex.getCause() != null && options.isSet("dev")) {
            ex.getCause().printStackTrace(log.getWriter(WriterKind.NOTICE));
        }
    }
}
public class ClusterTopologyRefreshScheduler { /** * Check if the { @ link EventExecutorGroup } is active * @ return false if the worker pool is terminating , shutdown or terminated */ private boolean isEventLoopActive ( ) { } }
EventExecutorGroup eventExecutors = clientResources . eventExecutorGroup ( ) ; if ( eventExecutors . isShuttingDown ( ) || eventExecutors . isShutdown ( ) || eventExecutors . isTerminated ( ) ) { return false ; } return true ;
public class DataObjectFontDescriptorImpl {
    /**
     * Reports whether the given feature currently differs from its default value.
     * EMF-generated dispatch: each case compares the field against its
     * {@code *_EDEFAULT} constant (null-safe in both directions).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
        case AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__DO_FT_FLGS:
            return DO_FT_FLGS_EDEFAULT == null ? doFtFlgs != null : !DO_FT_FLGS_EDEFAULT.equals(doFtFlgs);
        case AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__FONT_TECH:
            return FONT_TECH_EDEFAULT == null ? fontTech != null : !FONT_TECH_EDEFAULT.equals(fontTech);
        case AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__VFS:
            return VFS_EDEFAULT == null ? vfs != null : !VFS_EDEFAULT.equals(vfs);
        case AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__HFS:
            return HFS_EDEFAULT == null ? hfs != null : !HFS_EDEFAULT.equals(hfs);
        case AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__CHAR_ROT:
            return CHAR_ROT_EDEFAULT == null ? charRot != null : !CHAR_ROT_EDEFAULT.equals(charRot);
        case AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__ENC_ENV:
            return ENC_ENV_EDEFAULT == null ? encEnv != null : !ENC_ENV_EDEFAULT.equals(encEnv);
        case AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__ENC_ID:
            return ENC_ID_EDEFAULT == null ? encID != null : !ENC_ID_EDEFAULT.equals(encID);
        case AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__RESERVED:
            return RESERVED_EDEFAULT == null ? reserved != null : !RESERVED_EDEFAULT.equals(reserved);
        }
        // unknown feature: delegate to the superclass
        return super.eIsSet(featureID);
    }
}
public class DataLoader {
    /**
     * Creates a new DataLoader with the specified batch loader function and with the provided options,
     * where the batch loader function returns a map of {@link org.dataloader.Try} objects.
     *
     * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects
     * @param options the options to use
     * @param <K> the key type
     * @param <V> the value type
     * @return a new DataLoader
     * @see #newDataLoaderWithTry(BatchLoader)
     */
    @SuppressWarnings("unchecked")
    public static <K, V> DataLoader<K, V> newMappedDataLoaderWithTry(MappedBatchLoaderWithContext<K, Try<V>> batchLoadFunction, DataLoaderOptions options) {
        // the Try<V> wrapper is unwrapped internally; the cast is why this method
        // carries @SuppressWarnings("unchecked")
        return new DataLoader<>(batchLoadFunction, options);
    }
}
public class Branch { /** * Branch collect the URLs in the incoming intent for better attribution . Branch SDK extensively check for any sensitive data in the URL and skip if exist . * This method allows applications specify SDK to skip any additional URL patterns to be skipped * This method should be called immediately after calling { @ link Branch # getAutoInstance ( Context ) } * @ param urlSkipPattern { @ link String } A URL pattern that Branch SDK should skip from collecting data * @ return { @ link Branch } instance for successive method calls */ public Branch addUriHostsToSkip ( String urlSkipPattern ) { } }
if ( ! TextUtils . isEmpty ( urlSkipPattern ) ) UniversalResourceAnalyser . getInstance ( context_ ) . addToSkipURLFormats ( urlSkipPattern ) ; return this ;
public class UIComponentClassicTagBase {
    /**
     * Creates a new child component using <code>createComponent</code> and adds
     * it to its parent as a facet.
     *
     * @param context {@link FacesContext} for the current request
     * @param parent parent {@link UIComponent} of the new facet
     * @param name name of the new facet
     * @param newId id of the new facet
     * @return the newly created facet component
     */
    private UIComponent createFacet(FacesContext context, UIComponent parent, String name, String newId) throws JspException {
        UIComponent component = createComponent(context, newId);
        parent.getFacets().put(name, component);
        // record that this tag created (rather than merely located) its component
        created = true;
        return (component);
    }
}
public class CommercePriceListAccountRelPersistenceImpl {
    /**
     * Returns the commerce price list account rels before and after the current
     * commerce price list account rel in the ordered set where
     * commercePriceListId = &#63;.
     *
     * @param commercePriceListAccountRelId the primary key of the current commerce price list account rel
     * @param commercePriceListId the commerce price list ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next commerce price list account rel
     * @throws NoSuchPriceListAccountRelException if a commerce price list account rel with the primary key could not be found
     */
    @Override
    public CommercePriceListAccountRel[] findByCommercePriceListId_PrevAndNext(long commercePriceListAccountRelId, long commercePriceListId, OrderByComparator<CommercePriceListAccountRel> orderByComparator) throws NoSuchPriceListAccountRelException {
        CommercePriceListAccountRel commercePriceListAccountRel = findByPrimaryKey(commercePriceListAccountRelId);
        Session session = null;
        try {
            session = openSession();
            // result layout: { previous, current, next }
            CommercePriceListAccountRel[] array = new CommercePriceListAccountRelImpl[3];
            array[0] = getByCommercePriceListId_PrevAndNext(session, commercePriceListAccountRel, commercePriceListId, orderByComparator, true);
            array[1] = commercePriceListAccountRel;
            array[2] = getByCommercePriceListId_PrevAndNext(session, commercePriceListAccountRel, commercePriceListId, orderByComparator, false);
            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            // always release the Hibernate session, even on failure
            closeSession(session);
        }
    }
}
public class ResourceLimiter {
    /**
     * Registers an operation with the given size before sending.
     * This call WILL BLOCK until resources are available. It must be paired with
     * a call to {@code markCanBeCompleted} so resources are properly released.
     *
     * @param heapSize the serialized size of the RPC to be sent
     * @return a unique operation id
     * @throws java.lang.InterruptedException if interrupted while waiting for capacity
     */
    public long registerOperationWithHeapSize(long heapSize) throws InterruptedException {
        long start = clock.nanoTime();
        synchronized (this) {
            // block until the limiter has capacity for another operation
            while (unsynchronizedIsFull()) {
                waitForCompletions(REGISTER_WAIT_MILLIS);
            }
            long waitComplete = clock.nanoTime();
            // records the time spent waiting above (near zero when no throttling occurred)
            stats.markThrottling(waitComplete - start);
            long operationId = operationSequenceGenerator.incrementAndGet();
            // bookkeeping: size and start time are keyed by the operation id so
            // markCanBeCompleted can release exactly this operation's resources
            pendingOperationsWithSize.put(operationId, heapSize);
            currentWriteBufferSize += heapSize;
            starTimes.put(operationId, waitComplete);
            return operationId;
        }
    }
}
public class TableProxy {
    /**
     * Retrieves a record by key via the remote transport.
     *
     * @param strSeekSign which way to seek; null/"=" matches data, also &gt;, &lt;, &gt;=, and &lt;=
     * @param iOpenMode the open mode to send with the request
     * @param strKeyArea the name of the key area to seek on
     * @param strFields the fields to retrieve
     * @param objKeyData the data for the seek (the raw data if a single field, a BaseBuffer if multiple)
     * @return the record (as a vector) if successful, or the return code (as a Boolean) if not
     * @throws DBException on a database error returned by the remote side
     * @throws RemoteException on a transport failure
     */
    public Object seek(String strSeekSign, int iOpenMode, String strKeyArea, String strFields, Object objKeyData) throws DBException, RemoteException {
        BaseTransport transport = this.createProxyTransport(SEEK);
        // default to an exact-match seek when no sign is supplied
        if (strSeekSign == null)
            strSeekSign = Constants.EQUALS;
        transport.addParam(SIGN, strSeekSign);
        transport.addParam(OPEN_MODE, iOpenMode);
        transport.addParam(KEY, strKeyArea);
        transport.addParam(FIELDS, strFields);
        transport.addParam(KEY_DATA, objKeyData);
        Object strReturn = transport.sendMessageAndGetReply();
        Object objReturn = transport.convertReturnObject(strReturn);
        // re-throws if the remote side returned a DBException payload
        return this.checkDBException(objReturn);
    }
}
public class BlockMetadataManager { /** * Gets the metadata of a temp block . * @ param blockId the id of the temp block * @ return metadata of the block * @ throws BlockDoesNotExistException when block id can not be found */ public TempBlockMeta getTempBlockMeta ( long blockId ) throws BlockDoesNotExistException { } }
TempBlockMeta blockMeta = getTempBlockMetaOrNull ( blockId ) ; if ( blockMeta == null ) { throw new BlockDoesNotExistException ( ExceptionMessage . TEMP_BLOCK_META_NOT_FOUND , blockId ) ; } return blockMeta ;
public class ReflectionUtils { /** * 获取静态方法 * @ since 2.0.2 */ public static Method getStaticMethod ( final Class < ? > clazz , final String methodName , final Class < ? > ... parameterTypes ) { } }
Asserts . notNull ( clazz ) ; for ( Class < ? > superClass = clazz ; superClass != Object . class ; superClass = superClass . getSuperclass ( ) ) { try { Method method = clazz . getDeclaredMethod ( methodName , parameterTypes ) ; method . setAccessible ( true ) ; return method ; } catch ( NoSuchMethodException | SecurityException e ) { // Method不在当前类定义 , 继续向上转型 } } return null ;
public class ClientFactory { /** * Return a new { @ link Client } instance * @ return a new { @ link Client } instance */ public Client < ? extends Options , ? extends OptionsBuilder , ? extends RequestBuilder > newClient ( ) { } }
if ( clientClassName == null ) { return new DefaultClient ( ) ; } else { try { return ( Client ) Thread . currentThread ( ) . getContextClassLoader ( ) . loadClass ( clientClassName ) . newInstance ( ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } }
public class ClientConfig { /** * Set labels for the client . Deletes old labels if added earlier . * @ param labels The labels to be set * @ return configured { @ link com . hazelcast . client . config . ClientConfig } for chaining */ public ClientConfig setLabels ( Set < String > labels ) { } }
Preconditions . isNotNull ( labels , "labels" ) ; this . labels . clear ( ) ; this . labels . addAll ( labels ) ; return this ;
public class AlignmentTools {
    /**
     * Fills the aligned Atom arrays with the equivalent residues in the afpChain.
     * Atoms from ca2 are cloned; atoms from ca1 are referenced directly.
     *
     * @param afpChain the alignment whose optAln block structure drives the copy
     * @param ca1 CA atoms of the first structure
     * @param ca2 CA atoms of the second structure
     * @param ca1aligned output array for structure 1 (filled in place)
     * @param ca2aligned output array for structure 2 (filled in place)
     */
    public static void fillAlignedAtomArrays(AFPChain afpChain, Atom[] ca1, Atom[] ca2, Atom[] ca1aligned, Atom[] ca2aligned) {
        int pos = 0;
        int[] blockLens = afpChain.getOptLen();
        int[][][] optAln = afpChain.getOptAln();
        assert (afpChain.getBlockNum() <= optAln.length);
        for (int block = 0; block < afpChain.getBlockNum(); block++) {
            for (int i = 0; i < blockLens[block]; i++) {
                // optAln[block][0] indexes ca1, optAln[block][1] indexes ca2
                int pos1 = optAln[block][0][i];
                int pos2 = optAln[block][1][i];
                Atom a1 = ca1[pos1];
                Atom a2 = (Atom) ca2[pos2].clone();
                ca1aligned[pos] = a1;
                ca2aligned[pos] = a2;
                pos++;
            }
        }
        // this can happen when we load an old XML serialization which did not support modern ChemComp representation of modified residues.
        if (pos != afpChain.getOptLength()) {
            logger.warn("AFPChainScorer getTMScore: Problems reconstructing alignment! nr of loaded atoms is " + pos + " but should be " + afpChain.getOptLength());
            // we need to resize the array, because we allocated too many atoms earlier on.
            // NOTE(review): these assignments only rebind the local parameter
            // variables — the caller's arrays are NOT resized (Java is
            // pass-by-value). Confirm whether callers rely on the extra slots
            // being trimmed; if so, the method would need to return the arrays.
            ca1aligned = (Atom[]) resizeArray(ca1aligned, pos);
            ca2aligned = (Atom[]) resizeArray(ca2aligned, pos);
        }
    }
}
public class DiscreteFourierTransformOps {
    /**
     * Computes the phase of the complex image:<br>
     * phase = atan2(imaginary, real)
     *
     * @param transform (input) complex interleaved image
     * @param phase (output) phase of the image
     */
    public static void phase(InterleavedF32 transform, GrayF32 phase) {
        checkImageArguments(phase, transform);
        for (int y = 0; y < transform.height; y++) {
            // row start offsets; the two images may have different strides
            int indexTran = transform.startIndex + y * transform.stride;
            int indexPhase = phase.startIndex + y * phase.stride;
            // interleaved layout: real at indexTran, imaginary at indexTran + 1,
            // hence the step of 2 per pixel
            for (int x = 0; x < transform.width; x++, indexTran += 2) {
                float real = transform.data[indexTran];
                float img = transform.data[indexTran + 1];
                phase.data[indexPhase++] = (float) Math.atan2(img, real);
            }
        }
    }
}
public class JmsJcaActivationSpecImpl {
    /**
     * Sets the read-ahead property for this activation spec, tracing the new
     * value when debug trace is enabled.
     *
     * @param readAhead the read-ahead setting to store
     * @see com.ibm.ws.sib.api.jmsra.JmsJcaActivationSpec#setReadAhead(java.lang.String)
     */
    @Override
    public void setReadAhead(final String readAhead) {
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) {
            SibTr.debug(this, TRACE, "setReadAhead", readAhead);
        }
        _readAhead = readAhead;
    }
}
public class FrameScreen {
    /**
     * Processes the command.
     * Step 1 - Handle the command here if possible and return true if processed.
     * Step 2 - If not handled, the superclass passes it to children/parent
     * (never back to the source, to avoid an endless loop).
     *
     * @param strCommand the command to process
     * @param sourceSField the source screen field (to avoid echos)
     * @param iCommandOptions if this command creates a new screen, create in a new window?
     * @return true on success
     */
    public boolean doCommand(String strCommand, ScreenField sourceSField, int iCommandOptions) {
        // CLOSE is the only command this screen handles directly: free resources
        if (strCommand.equalsIgnoreCase(ThinMenuConstants.CLOSE)) {
            this.free();
            return true;
        }
        // everything else is delegated to the standard routing in the superclass
        return super.doCommand(strCommand, sourceSField, iCommandOptions);
    }
}
public class JobState {
    /**
     * Gets the {@link LauncherTypeEnum} for this {@link JobState}.
     * Falls back to {@code LOCAL} both when the property is absent and when its
     * value does not match any enum constant.
     */
    public LauncherTypeEnum getLauncherType() {
        return Enums.getIfPresent(LauncherTypeEnum.class,
                this.getProp(ConfigurationKeys.JOB_LAUNCHER_TYPE_KEY, JobLauncherFactory.JobLauncherType.LOCAL.name()))
                .or(LauncherTypeEnum.LOCAL);
    }
}
public class Bits {
    /**
     * Serializes a {@code long} into 8 bytes in big-endian (network) order:
     * the most significant byte first.
     *
     * @param data the value to serialize
     * @return a new 8-byte array holding {@code data}, most significant byte first
     */
    public static byte[] toBytes(final long data) {
        final byte[] out = new byte[8];
        for (int i = 0; i < 8; i++) {
            // byte i carries bits [63 - 8*i .. 56 - 8*i] of the value;
            // the cast to byte truncates to the low 8 bits of the shifted word
            out[i] = (byte) (data >> (56 - 8 * i));
        }
        return out;
    }
}
public class Page {
    /**
     * Fetches the page with the given title.
     * CAUTION: Only returns 1 result, even if several results are possible.
     *
     * @param pTitle the title to look up
     * @param useExactTitle if false, the wiki-style (normalized) title is used for the lookup
     * @throws WikiApiException thrown if errors occurred
     * @throws WikiPageNotFoundException if no page with the (possibly normalized) title exists
     */
    private void fetchByTitle(Title pTitle, boolean useExactTitle) throws WikiApiException {
        String searchString = pTitle.getPlainTitle();
        if (!useExactTitle) {
            // use the normalized wiki-style form of the title for the query
            searchString = pTitle.getWikiStyleTitle();
        }
        Session session;
        session = this.wiki.__getHibernateSession();
        session.beginTransaction();
        // LIMIT 1: the database may contain several case variants of the same name
        Integer pageId = (Integer) session
                .createNativeQuery("select pml.pageID from PageMapLine as pml where pml.name = :pagetitle LIMIT 1")
                .setParameter("pagetitle", searchString, StringType.INSTANCE)
                .uniqueResult();
        session.getTransaction().commit();
        if (pageId == null) {
            throw new WikiPageNotFoundException("No page with name " + searchString + " was found.");
        }
        fetchByPageId(pageId);
        // If the fetched page's raw title differs from what we searched for (and
        // we did not land on a redirect), we may have retrieved the wrong page.
        if (!this.isRedirect && searchString != null && !searchString.equals(getTitle().getRawTitleText())) {
            if (this.isRedirect) {
                // In case we already tried to re-retrieve the discussion page
                // unsuccessfully, give up here to avoid an infinite loop. This can
                // happen with several PageMapLine entries sharing a name in
                // different upper/lower case variants: with a case-insensitive
                // collation the query always returns the first match, and if that
                // is a redirect to a different writing variant we would loop
                // forever. A case-sensitive collation, or set-valued query
                // results, would fix this properly; for now we fail gracefully.
                // NOTE(review): this branch looks unreachable given the enclosing
                // !this.isRedirect check — confirm whether fetchByPageId can
                // change isRedirect between the two tests.
                throw new WikiPageNotFoundException("No discussion page with name " + searchString + " could be retrieved. This is most likely due to multiple writing variants of the same page in the database");
            } else {
                this.isRedirect = true;
                /*
                 * WORKAROUND: if our page is a redirect to a discussion page, we
                 * might not retrieve the target discussion page as expected but
                 * rather the article associated with it. We check this here and
                 * re-retrieve the correct page. This error should be avoided by
                 * keeping the namespace information in the database.
                 * Fix provided by Shiri Dori-Hacohen, discussed in the Google
                 * Group: https://groups.google.com/forum/#!topic/jwpl/2nlr55yp87I/discussion
                 */
                if (searchString.startsWith(DISCUSSION_PREFIX) && !getTitle().getRawTitleText().startsWith(DISCUSSION_PREFIX)) {
                    try {
                        fetchByTitle(new Title(DISCUSSION_PREFIX + getTitle().getRawTitleText()), useExactTitle);
                    } catch (WikiPageNotFoundException e) {
                        throw new WikiPageNotFoundException("No page with name " + DISCUSSION_PREFIX + getTitle().getRawTitleText() + " was found.");
                    }
                }
            }
        }
    }
}
public class DateCalculator {
    /**
     * Returns the day of the week for an ISO date string ("yyyy-MM-dd"), using
     * {@link java.util.Calendar} numbering: 1 = Sunday ... 7 = Saturday.
     *
     * @param date the date in "yyyy-MM-dd" form
     * @return the Calendar-style day of week, or 0 if the string cannot be parsed
     */
    public static int getWeekdayOfDate(String date) {
        try {
            // java.time replaces the previous SimpleDateFormat/Calendar code:
            // it is thread-safe and validates the date strictly instead of
            // leniently accepting values such as month 13.
            java.time.DayOfWeek dow = java.time.LocalDate.parse(date).getDayOfWeek();
            // Map ISO numbering (Mon=1..Sun=7) to Calendar numbering (Sun=1..Sat=7).
            return dow.getValue() % 7 + 1;
        } catch (java.time.format.DateTimeParseException e) {
            // Preserve the original contract: unparseable input yields 0
            // (previously the ParseException was printed and 0 returned).
            return 0;
        }
    }
}
public class BrowseIterator { /** * Changes the page size of the list and reforms the list . May cause unexpected * behavior in terms of what is considered the " current page " after resizing . * @ param ps the new page size */ public void setPageSize ( int ps ) { } }
ArrayList < T > tmp = new ArrayList < T > ( ) ; for ( Collection < T > page : pages ) { for ( T item : page ) { tmp . add ( item ) ; } } setup ( tmp , ps , sorter ) ;
public class AiMaterial {
    /**
     * Returns the texture mapping mode for the w axis.<p>
     * If missing, defaults to {@link AiTextureMapMode#CLAMP}.
     *
     * @param type the texture type
     * @param index the index in the texture stack
     * @return the texture mapping mode
     */
    public AiTextureMapMode getTextureMapModeW(AiTextureType type, int index) {
        checkTexRange(type, index);
        // NOTE(review): type/index are range-checked above but not used in the
        // property lookup itself — confirm whether the key should incorporate them.
        Property p = getProperty(PropertyKey.TEX_MAP_MODE_W.m_key);
        if (null == p || null == p.getData()) {
            // property absent: fall back to the registered default (CLAMP)
            return (AiTextureMapMode) m_defaults.get(PropertyKey.TEX_MAP_MODE_W);
        }
        return AiTextureMapMode.fromRawValue(p.getData());
    }
}
public class SpaceCentricTypedTask {
    /**
     * Reads the information stored in a Task and copies it into this
     * SpaceCentricTypedTask: account, store id, space id, and attempt count.
     *
     * @param task the task whose properties are read
     */
    public void readTask(Task task) {
        Map<String, String> props = task.getProperties();
        // missing properties yield null values via Map.get
        setAccount(props.get(ACCOUNT_PROP));
        setStoreId(props.get(STORE_ID_PROP));
        setSpaceId(props.get(SPACE_ID_PROP));
        this.attempts = task.getAttempts();
    }
}
public class LinkHandlerImpl {
    /**
     * Resolves the link: detects the link type (first accepting type wins),
     * runs pre-processors, resolves via the link type, builds markup (first
     * accepting builder wins), and runs post-processors.
     *
     * @param linkRequest link request
     * @return link metadata (never null)
     * @throws RuntimeException if no link types are configured, or if a
     *         processor or link type returns null
     */
    @NotNull
    @SuppressWarnings({ "null", "unused" })
    Link processRequest(@NotNull LinkRequest linkRequest) {
        // detect link type - first accepting wins
        LinkType linkType = null;
        List<Class<? extends LinkType>> linkTypes = linkHandlerConfig.getLinkTypes();
        if (linkTypes == null || linkTypes.isEmpty()) {
            throw new RuntimeException("No link types defined.");
        }
        for (Class<? extends LinkType> candidateLinkTypeClass : linkTypes) {
            LinkType candidateLinkType = AdaptTo.notNull(adaptable, candidateLinkTypeClass);
            if (candidateLinkType.accepts(linkRequest)) {
                linkType = candidateLinkType;
                break;
            }
        }
        Link link = new Link(linkType, linkRequest);
        // preprocess link before resolving
        List<Class<? extends LinkProcessor>> linkPreProcessors = linkHandlerConfig.getPreProcessors();
        if (linkPreProcessors != null) {
            for (Class<? extends LinkProcessor> processorClass : linkPreProcessors) {
                LinkProcessor processor = AdaptTo.notNull(adaptable, processorClass);
                link = processor.process(link);
                if (link == null) {
                    throw new RuntimeException("LinkPreProcessor '" + processor + "' returned null, page '" + (currentPage != null ? currentPage.getPath() : "-") + "'.");
                }
            }
        }
        // resolve link (skipped when no link type accepted the request)
        if (linkType != null) {
            link = linkType.resolveLink(link);
            if (link == null) {
                throw new RuntimeException("LinkType '" + linkType + "' returned null, page '" + (currentPage != null ? currentPage.getPath() : "-") + "'.");
            }
        }
        // generate markup (if markup builder is available) - first accepting wins
        List<Class<? extends LinkMarkupBuilder>> linkMarkupBuilders = linkHandlerConfig.getMarkupBuilders();
        if (linkMarkupBuilders != null) {
            for (Class<? extends LinkMarkupBuilder> linkMarkupBuilderClass : linkMarkupBuilders) {
                LinkMarkupBuilder linkMarkupBuilder = AdaptTo.notNull(adaptable, linkMarkupBuilderClass);
                if (linkMarkupBuilder.accepts(link)) {
                    link.setAnchor(linkMarkupBuilder.build(link));
                    break;
                }
            }
        }
        // postprocess link after resolving
        List<Class<? extends LinkProcessor>> linkPostProcessors = linkHandlerConfig.getPostProcessors();
        if (linkPostProcessors != null) {
            for (Class<? extends LinkProcessor> processorClass : linkPostProcessors) {
                LinkProcessor processor = AdaptTo.notNull(adaptable, processorClass);
                link = processor.process(link);
                if (link == null) {
                    throw new RuntimeException("LinkPostProcessor '" + processor + "' returned null, page '" + (currentPage != null ? currentPage.getPath() : "-") + "'.");
                }
            }
        }
        return link;
    }
}
public class ToUnknownStream {
    /**
     * Passes the call on to the underlying handler, flushing any buffered
     * first start-tag beforehand so events stay in document order.
     *
     * @see org.xml.sax.ext.DeclHandler#elementDecl(String, String)
     */
    public void elementDecl(String arg0, String arg1) throws SAXException {
        if (m_firstTagNotEmitted) {
            emitFirstTag();
        }
        m_handler.elementDecl(arg0, arg1);
    }
}
public class TrmMessageFactoryImpl {
    /**
     * Creates a new, empty TrmClientAttachRequest message.
     *
     * @return the new TrmClientAttachRequest
     * @exception MessageCreateFailedException thrown if such a message cannot be created
     */
    public TrmClientAttachRequest createNewTrmClientAttachRequest() throws MessageCreateFailedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createNewTrmClientAttachRequest");
        TrmClientAttachRequest msg = null;
        try {
            msg = new TrmClientAttachRequestImpl();
        } catch (MessageDecodeFailedException e) {
            /* No need to FFDC this as JsMsgObject will already have done so */
            // No FFDC code needed
            // wrap the decode failure in the checked exception callers expect
            throw new MessageCreateFailedException(e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createNewTrmClientAttachRequest");
        return msg;
    }
}
public class IPv6AddressSection {
    /**
     * Replaces segments starting from startIndex and ending before endIndex with
     * the segments starting at replacementStartIndex and ending before
     * replacementEndIndex from the replacement section.
     *
     * @param startIndex first segment index (inclusive) to replace in this section
     * @param endIndex last segment index (exclusive) to replace in this section
     * @param replacement the section supplying the replacement segments
     * @param replacementStartIndex first segment index (inclusive) in the replacement
     * @param replacementEndIndex last segment index (exclusive) in the replacement
     * @throws IndexOutOfBoundsException if any index is out of range
     * @throws AddressValueException if the resulting section would exceed the
     *         maximum segment count for this address type and version
     * @return the section with the segments replaced
     */
    public IPv6AddressSection replace(int startIndex, int endIndex, IPv6AddressSection replacement, int replacementStartIndex, int replacementEndIndex) {
        // delegates to the internal overload; the trailing false disables the
        // extra flag variant of replacement — presumably prefix/network handling,
        // TODO confirm against the internal overload's signature
        return replace(startIndex, endIndex, replacement, replacementStartIndex, replacementEndIndex, false);
    }
}
public class ConnectionSpecSelector {
    /**
     * Reports a failure to complete a connection and determines whether the next
     * {@link ConnectionSpec}, if any, should be tried.
     *
     * @param e the failure that occurred while connecting
     * @return {@code true} if the connection should be retried using
     *         {@link #configureSecureSocket(SSLSocket)}, or {@code false} if not
     */
    boolean connectionFailed(IOException e) {
        // Any future attempt to connect using this strategy will be a fallback attempt.
        isFallback = true;
        if (!isFallbackPossible) {
            return false;
        }
        // If there was a protocol problem, don't recover.
        if (e instanceof ProtocolException) {
            return false;
        }
        // If there was an interruption or timeout (SocketTimeoutException), don't recover.
        // For the socket connect timeout case we do not try the same host with a different
        // ConnectionSpec: we assume it is unreachable.
        if (e instanceof InterruptedIOException) {
            return false;
        }
        // Look for known client-side or negotiation errors that are unlikely to be fixed by trying
        // again with a different connection spec.
        if (e instanceof SSLHandshakeException) {
            // If the problem was a CertificateException from the X509TrustManager, do not retry.
            if (e.getCause() instanceof CertificateException) {
                return false;
            }
        }
        if (e instanceof SSLPeerUnverifiedException) {
            // e.g. a certificate pinning error.
            return false;
        }
        // Retry for all other SSL failures; non-SSL IOExceptions are not retried.
        return e instanceof SSLException;
    }
}
public class Math {
    /**
     * Kullback-Leibler divergence between a sparse distribution P (x) and a
     * dense distribution Q (y). KL measures the expected number of extra bits
     * required to code samples from P when using a code based on Q. It is not
     * symmetric: KL(P||Q) is not necessarily KL(Q||P).
     *
     * @param x sparse representation of P; must be non-empty
     * @param y dense representation of Q, indexed by the sparse entries' indices
     * @return the divergence, or {@code Double.POSITIVE_INFINITY} when the
     *         supports do not intersect
     * @throws IllegalArgumentException if {@code x} is empty
     */
    public static double KullbackLeiblerDivergence(SparseArray x, double[] y) {
        if (x.isEmpty()) {
            throw new IllegalArgumentException("List x is empty.");
        }
        Iterator<SparseArray.Entry> iter = x.iterator();
        boolean intersection = false;
        double kl = 0.0;
        while (iter.hasNext()) {
            SparseArray.Entry b = iter.next();
            int i = b.i;
            // only entries where Q has mass contribute; zero-mass entries of Q
            // with nonzero P mass would make the divergence infinite
            if (y[i] > 0) {
                intersection = true;
                kl += b.x * Math.log(b.x / y[i]);
            }
        }
        if (intersection) {
            return kl;
        } else {
            // disjoint supports: infinite divergence by convention
            return Double.POSITIVE_INFINITY;
        }
    }
}
public class BehaviorTreeReader { /** * Parses the given string . * @ param string the string * @ throws SerializationException if the string cannot be successfully parsed . */ public void parse ( String string ) { } }
char [ ] data = string . toCharArray ( ) ; parse ( data , 0 , data . length ) ;
public class AmazonIdentityManagementClient {
    /**
     * Creates an alias for your AWS account. For information about using an AWS account alias, see
     * <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/AccountAlias.html">Using an Alias for Your
     * AWS Account ID</a> in the <i>IAM User Guide</i>.
     *
     * @param request the CreateAccountAlias request
     * @return Result of the CreateAccountAlias operation returned by the service.
     * @throws EntityAlreadyExistsException
     *         The request was rejected because it attempted to create a resource that already exists.
     * @throws LimitExceededException
     *         The request was rejected because it attempted to create resources beyond the current AWS account
     *         limits. The error message describes the limit exceeded.
     * @throws ServiceFailureException
     *         The request processing has failed because of an unknown error, exception or failure.
     * @sample AmazonIdentityManagement.CreateAccountAlias
     */
    @Override
    public CreateAccountAliasResult createAccountAlias(CreateAccountAliasRequest request) {
        // apply client-side handlers/validation before dispatching to the service
        request = beforeClientExecution(request);
        return executeCreateAccountAlias(request);
    }
}
public class GetAccountAuthorizationDetailsResult {
    /**
     * A list containing information about IAM groups.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setGroupDetailList(java.util.Collection)} or {@link #withGroupDetailList(java.util.Collection)} if you
     * want to override the existing values.
     *
     * @param groupDetailList
     *        A list containing information about IAM groups.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetAccountAuthorizationDetailsResult withGroupDetailList(GroupDetail... groupDetailList) {
        // lazily create the backing list, pre-sized to the varargs length
        if (this.groupDetailList == null) {
            setGroupDetailList(new com.amazonaws.internal.SdkInternalList<GroupDetail>(groupDetailList.length));
        }
        // append (not replace) the supplied values
        for (GroupDetail ele : groupDetailList) {
            this.groupDetailList.add(ele);
        }
        return this;
    }
}
public class Queue {
    /**
     * Closes this Queue.
     * Poison Pills are used to communicate closure to connected Streams: one
     * pill is added per connected Stream. If a BlockingQueue backs this
     * async.Queue, {@code offer} may block until the pill can be added.
     *
     * @return true if closed
     */
    @Override
    public boolean close() {
        this.open = false;
        // one poison pill per listening stream so each consumer observes closure
        for (int i = 0; i < listeningStreams.get(); i++) {
            try {
                this.queue.offer((T) POISON_PILL);
            } catch (Exception e) {
                // deliberate best-effort: a failure to enqueue one stream's pill
                // must not prevent closing the queue for the others
            }
        }
        return true;
    }
}
public class Framework { /** * Gets the version for a JAR file . * @ param path * Path to JAR file * @ return Found version or { @ code null } */ private static String getVersionFromJar ( final Path path ) { } }
Matcher matcher = JAR_VERSION . matcher ( path . toString ( ) ) ; if ( matcher . matches ( ) ) { return matcher . group ( 1 ) ; } else { Logger . error ( "JAR file \"{}\" does not contain a version" , path ) ; return null ; }
public class OmemoManager { /** * Return a set of all OMEMO capable devices of a contact . * Note , that this method does not explicitly refresh the device list of the contact , so it might be outdated . * @ see # requestDeviceListUpdateFor ( BareJid ) * @ param contact contact we want to get a set of device of . * @ return set of known devices of that contact . */ public Set < OmemoDevice > getDevicesOf ( BareJid contact ) { } }
OmemoCachedDeviceList list = getOmemoService ( ) . getOmemoStoreBackend ( ) . loadCachedDeviceList ( getOwnDevice ( ) , contact ) ; HashSet < OmemoDevice > devices = new HashSet < > ( ) ; for ( int deviceId : list . getActiveDevices ( ) ) { devices . add ( new OmemoDevice ( contact , deviceId ) ) ; } return devices ;
public class JawrRequestHandler {
    /**
     * Writes the content of the requested bundle to the response, choosing between
     * the pre-gzipped binary stream and the character writer path.
     *
     * @param requestedPath the requested path
     * @param request the request
     * @param response the response
     * @throws IOException if an IOException occurs
     * @throws ResourceNotFoundException if the resource is not found
     */
    protected void writeContent(String requestedPath, HttpServletRequest request, HttpServletResponse response)
            throws IOException, ResourceNotFoundException {
        // Send gzipped resource if user agent supports it.
        int idx = requestedPath.indexOf(BundleRenderer.GZIP_PATH_PREFIX);
        if (idx != -1) {
            // Strip the gzip prefix so the remaining path matches the bundle registry,
            // re-adding the leading URL separator.
            requestedPath = JawrConstant.URL_SEPARATOR
                    + requestedPath.substring(idx + BundleRenderer.GZIP_PATH_PREFIX.length(), requestedPath.length());
            if (isValidRequestedPath(requestedPath)) {
                response.setHeader(CONTENT_ENCODING, GZIP);
                // Binary copy of the pre-gzipped bundle straight to the output stream.
                bundlesHandler.streamBundleTo(requestedPath, response.getOutputStream());
            } else {
                throw new ResourceNotFoundException(requestedPath);
            }
        } else {
            // In debug mode, we take in account the image generated from a
            // StreamGenerator like classpath Image generator
            // The following code will rewrite the URL path for the generated
            // images,
            // because in debug mode, we are retrieving the CSS resources
            // directly from the webapp
            // and if the CSS contains generated images, we should rewrite the
            // URL.
            BinaryResourcesHandler imgRsHandler =
                    (BinaryResourcesHandler) servletContext.getAttribute(JawrConstant.BINARY_CONTEXT_ATTRIBUTE);
            if (imgRsHandler != null && this.jawrConfig.isDebugModeOn() && resourceType.equals(JawrConstant.CSS_TYPE)) {
                handleGeneratedCssInDebugMode(requestedPath, request, response, imgRsHandler);
            } else {
                // Normal (non-gzip, non-debug-CSS) path: character-based bundle write.
                if (isValidRequestedPath(requestedPath)) {
                    Writer out = response.getWriter();
                    bundlesHandler.writeBundleTo(requestedPath, out);
                } else {
                    throw new ResourceNotFoundException(requestedPath);
                }
            }
        }
    }
}
public class I18NConnector { /** * Returns the value for this label ussing the getThreadLocaleLanguage * @ param section * @ param idInSection * @ return */ public static String getLabel ( String section , String idInSection ) { } }
Language language = getThreadLocalLanguage ( null ) ; if ( language == null ) { return idInSection ; } else { return getLabel ( language , section , idInSection ) ; }
public class ApiOvhDedicatedserver { /** * Get this object properties * REST : GET / dedicated / server / { serviceName } / intervention / { interventionId } * @ param serviceName [ required ] The internal name of your dedicated server * @ param interventionId [ required ] The intervention id */ public OvhIntervention serviceName_intervention_interventionId_GET ( String serviceName , Long interventionId ) throws IOException { } }
String qPath = "/dedicated/server/{serviceName}/intervention/{interventionId}" ; StringBuilder sb = path ( qPath , serviceName , interventionId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhIntervention . class ) ;
public class PipelineImageTransform {
    /**
     * Takes an image and executes a pipeline of combined transforms.
     *
     * @param image image to transform, null == end of stream
     * @param random random object to use (or null for deterministic)
     * @return transformed image
     */
    @Override
    protected ImageWritable doTransform(ImageWritable image, Random random) {
        if (shuffle) {
            // NOTE(review): shuffles with the default randomness source, NOT the supplied
            // `random`, so pipeline order is not deterministic even when a Random is
            // provided — confirm whether this is intended.
            Collections.shuffle(imageTransforms);
        }
        // Track which transforms actually fired on this invocation.
        currentTransforms.clear();
        // execute each item in the pipeline
        for (Pair<ImageTransform, Double> tuple : imageTransforms) {
            // A probability of exactly 1.0 always runs; otherwise draw against it.
            // NOTE(review): the coin-flip uses the field `rng`, not the `random` parameter;
            // only the transform call itself receives `random` — verify this asymmetry.
            if (tuple.getSecond() == 1.0 || rng.nextDouble() < tuple.getSecond()) { // probability of execution
                currentTransforms.add(tuple.getFirst());
                image = random != null ? tuple.getFirst().transform(image, random)
                        : tuple.getFirst().transform(image);
            }
        }
        return image;
    }
}
public class DatabaseImpl {
    /**
     * Returns the subset of the given document ID / revisions that are not stored in the
     * database. The input is a map whose key is a document ID and whose value is a list of
     * revision IDs; the output is in the same format.
     *
     * @see <a target="_blank" href="http://wiki.apache.org/couchdb/HttpPostRevsDiff">HttpPostRevsDiff documentation</a>
     * @param revisions a map of document ID → revision IDs
     * @return the missing subset of the given document ID / revisions
     * @throws IllegalArgumentException if {@code revisions} is empty
     * @throws DocumentStoreException if the difference between revs could not be calculated
     */
    public Map<String, List<String>> revsDiff(final Map<String, List<String>> revisions) throws DocumentStoreException {
        Misc.checkState(this.isOpen(), "Database is closed");
        Misc.checkNotNull(revisions, "Input revisions");
        Misc.checkArgument(!revisions.isEmpty(), "revisions cannot be empty");
        try {
            ValueListMap<String, String> missingRevs = new ValueListMap<String, String>();
            // Break down by docId first to avoid potential rev ID clashes between doc IDs
            for (Map.Entry<String, List<String>> entry : revisions.entrySet()) {
                String docId = entry.getKey();
                List<String> revs = entry.getValue();
                // Partition into batches to avoid exceeding placeholder limit
                // The doc ID will use one placeholder, so use limit - 1 for the number of
                // revs for the remaining placeholders.
                List<List<String>> batches = CollectionUtils.partition(revs, SQLITE_QUERY_PLACEHOLDERS_LIMIT - 1);
                for (List<String> revsBatch : batches) {
                    // Each batch runs on the DB queue; get(...) blocks for this batch's result.
                    missingRevs.addValuesToKey(docId, get(queue.submit(new RevsDiffBatchCallable(docId, revsBatch))));
                }
            }
            return missingRevs;
        } catch (ExecutionException e) {
            String message = "Failed to calculate difference in revisions";
            logger.log(Level.SEVERE, message, e);
            throw new DocumentStoreException(message, e);
        }
    }
}
public class CardAPI { /** * 修改库存 * @ param accessToken accessToken * @ param modifystock modifystock * @ return result */ public static BaseResult modifyStock ( String accessToken , ModifyStock modifystock ) { } }
return modifyStock ( accessToken , JsonUtil . toJSONString ( modifystock ) ) ;
public class AsperaTransferManager { /** * Check the LRU cache to see if the Aspera Key has already been retrieved for * this bucket . If it has , return it , else call onto the s3Client to get the * FASPConnectionInfo for the bucket name * @ param bucketName * @ return */ public FASPConnectionInfo getFaspConnectionInfo ( String bucketName ) { } }
log . trace ( "AsperaTransferManager.getFaspConnectionInfo >> start " + System . nanoTime ( ) ) ; FASPConnectionInfo faspConnectionInfo = akCache . get ( bucketName ) ; if ( null == faspConnectionInfo ) { log . trace ( "AsperaTransferManager.getFaspConnectionInfo >> retrieve from COS " + System . nanoTime ( ) ) ; faspConnectionInfo = s3Client . getBucketFaspConnectionInfo ( bucketName ) ; log . trace ( "AsperaTransferManager.getFaspConnectionInfo << retrieve from COS " + System . nanoTime ( ) ) ; if ( null == faspConnectionInfo ) { throw new SdkClientException ( "Failed to retrieve faspConnectionInfo for bucket: " + bucketName ) ; } akCache . put ( bucketName , faspConnectionInfo ) ; } log . trace ( "AsperaTransferManager.getFaspConnectionInfo << end " + System . nanoTime ( ) ) ; return faspConnectionInfo ;
public class AWSServiceDiscoveryClient { /** * Creates an HTTP namespace . Service instances that you register using an HTTP namespace can be discovered using a * < code > DiscoverInstances < / code > request but can ' t be discovered using DNS . * For the current limit on the number of namespaces that you can create using the same AWS account , see < a * href = " http : / / docs . aws . amazon . com / cloud - map / latest / dg / cloud - map - limits . html " > AWS Cloud Map Limits < / a > in the * < i > AWS Cloud Map Developer Guide < / i > . * @ param createHttpNamespaceRequest * @ return Result of the CreateHttpNamespace operation returned by the service . * @ throws InvalidInputException * One or more specified values aren ' t valid . For example , a required value might be missing , a numeric * value might be outside the allowed range , or a string value might exceed length constraints . * @ throws NamespaceAlreadyExistsException * The namespace that you ' re trying to create already exists . * @ throws ResourceLimitExceededException * The resource can ' t be created because you ' ve reached the limit on the number of resources . * @ throws DuplicateRequestException * The operation is already in progress . * @ sample AWSServiceDiscovery . CreateHttpNamespace * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / servicediscovery - 2017-03-14 / CreateHttpNamespace " * target = " _ top " > AWS API Documentation < / a > */ @ Override public CreateHttpNamespaceResult createHttpNamespace ( CreateHttpNamespaceRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateHttpNamespace ( request ) ;
public class ProxyArtifactStore { /** * { @ inheritDoc } */ public long getMetadataLastModified ( String path ) throws IOException , MetadataNotFoundException { } }
Metadata metadata = getMetadata ( path ) ; if ( metadata != null ) { if ( ! StringUtils . isEmpty ( metadata . getGroupId ( ) ) || ! StringUtils . isEmpty ( metadata . getArtifactId ( ) ) || ! StringUtils . isEmpty ( metadata . getVersion ( ) ) || ( metadata . getPlugins ( ) != null && ! metadata . getPlugins ( ) . isEmpty ( ) ) || ( metadata . getVersioning ( ) != null && ( ! StringUtils . isEmpty ( metadata . getVersioning ( ) . getLastUpdated ( ) ) || ! StringUtils . isEmpty ( metadata . getVersioning ( ) . getLatest ( ) ) || ! StringUtils . isEmpty ( metadata . getVersioning ( ) . getRelease ( ) ) || ( metadata . getVersioning ( ) . getVersions ( ) != null && ! metadata . getVersioning ( ) . getVersions ( ) . isEmpty ( ) ) || ( metadata . getVersioning ( ) . getSnapshot ( ) != null ) ) ) ) { return System . currentTimeMillis ( ) ; } } throw new MetadataNotFoundException ( path ) ;
public class IOUtil { /** * Write to a file the bytes read from an input stream . * @ param filename the full or relative path to the file . */ public void inputStreamToFile ( InputStream is , String filename ) throws IOException { } }
FileOutputStream fos = new FileOutputStream ( filename ) ; inputStreamToOutputStream ( is , fos ) ; fos . close ( ) ;
public class CmsOUEditDialog { /** * Adds validators to fields . < p > */ @ SuppressWarnings ( "unchecked" ) protected void validate ( ) { } }
if ( m_ou == null ) { m_name . removeAllValidators ( ) ; m_name . addValidator ( new NameValidator ( ) ) ; } m_description . setRequired ( true ) ; m_description . setRequiredError ( "Required" ) ; if ( m_ouResources . getRows ( ) . isEmpty ( ) & ! m_webuser . getValue ( ) . booleanValue ( ) ) { CmsPathSelectField field = new CmsPathSelectField ( ) ; field . setUseRootPaths ( true ) ; field . setCmsObject ( m_cms ) ; m_ouResources . addRow ( field ) ; } for ( I_CmsEditableGroupRow row : m_ouResources . getRows ( ) ) { ( ( AbstractField < String > ) row . getComponent ( ) ) . removeAllValidators ( ) ; ( ( AbstractField < String > ) row . getComponent ( ) ) . addValidator ( new ResourceValidator ( ) ) ; }
public class FileUtil { /** * 复制文件或目录 < br > * 如果目标文件为目录 , 则将源文件以相同文件名拷贝到目标目录 * @ param srcPath 源文件或目录 * @ param destPath 目标文件或目录 , 目标不存在会自动创建 ( 目录 、 文件都创建 ) * @ param isOverride 是否覆盖目标文件 * @ return 目标目录或文件 * @ throws IORuntimeException IO异常 */ public static File copy ( String srcPath , String destPath , boolean isOverride ) throws IORuntimeException { } }
return copy ( file ( srcPath ) , file ( destPath ) , isOverride ) ;
public class CachedPasswordUserInfoService {
    /**
     * Builds the user-info map for a portlet. When the portlet declares that it wants
     * the user's password, the cached password is looked up from the user's security
     * context, optionally decrypted, and added under the configured password key.
     *
     * @see org.apache.pluto.container.UserInfoService#getUserInfo(javax.portlet.PortletRequest, org.apache.pluto.container.PortletWindow)
     */
    @Override
    public Map<String, String> getUserInfo(PortletRequest request, PortletWindow portletWindow)
            throws PortletContainerException {
        Map<String, String> userInfo = new HashMap<String, String>();
        // check to see if a password is expected by this portlet
        if (isPasswordRequested(request, portletWindow)) {
            log.debug("Portlet named {} wants a password", portletWindow.getPortletDefinition().getPortletName());
            // Resolve the portal-side person and security context for the current request.
            final HttpServletRequest httpServletRequest = this.portalRequestUtils.getPortletHttpRequest(request);
            final IUserInstance userInstance = userInstanceManager.getUserInstance(httpServletRequest);
            final IPerson person = userInstance.getPerson();
            final ISecurityContext context = person.getSecurityContext();
            // if it is, attempt to request a password
            String password = getPassword(context);
            log.debug(password != null ? "Have a non-null password" : "password was null");
            // Only decrypt when configured to and when a password was actually found.
            if (this.decryptPassword && password != null) {
                log.debug("Attempting to decrypt password");
                password = stringEncryptionService.decrypt(password);
                log.debug("Password decryption complete, password is length {}",
                        password != null ? password.length() : "is null");
            }
            if (password != null) {
                userInfo.put(this.passwordKey, password);
                log.debug("Found password with length {} for portlet name {}",
                        password.length() != 0 ? "non-zero" : 0,
                        portletWindow.getPortletDefinition().getPortletName());
            }
        }
        return userInfo;
    }
}
public class PaymentUrl { /** * Get Resource Url for GetAvailablePaymentActions * @ param orderId Unique identifier of the order . * @ param paymentId Unique identifier of the payment for which to perform the action . * @ return String Resource Url */ public static MozuUrl getAvailablePaymentActionsUrl ( String orderId , String paymentId ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/orders/{orderId}/payments/{paymentId}/actions" ) ; formatter . formatUrl ( "orderId" , orderId ) ; formatter . formatUrl ( "paymentId" , paymentId ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class BeanToMapCopier { /** * If at least one property name can be converted to an assignable key , say the operation is supported and we ' ll * give it a shot . */ @ Override public boolean supports ( TherianContext context , Copy < ? extends Object , ? extends Map > copy ) { } }
if ( ! super . supports ( context , copy ) ) { return false ; } final Type targetKeyType = getKeyType ( copy . getTargetPosition ( ) ) ; final Position . ReadWrite < ? > targetKey = Positions . readWrite ( targetKeyType ) ; return getProperties ( context , copy . getSourcePosition ( ) ) . anyMatch ( propertyName -> context . supports ( Convert . to ( targetKey , Positions . readOnly ( propertyName ) ) ) ) ;
public class N1qlQuery { /** * Create a new query with named parameters . Note that the { @ link JsonObject } * should not be mutated until { @ link # n1ql ( ) } is called since it backs the * creation of the query string . * Named parameters have the form of ` $ name ` , where the ` name ` represents the unique name . The * following two examples are equivalent and compare the { @ link # simple ( Statement ) } * vs the named { @ link # parameterized ( Statement , JsonObject ) } approach : * Simple : * N1qlQuery . simple ( " SELECT * FROM ` travel - sample ` WHERE type = ' airline ' and name like ' A % ' " ) * Named Params : * N1qlQuery . parameterized ( * " SELECT * FROM ` travel - sample ` WHERE type = $ type and name like $ name " , * JsonObject . create ( ) * . put ( " type " , " airline " ) * . put ( " name " , " A % " ) * Using parameterized statements combined with non - adhoc queries ( which is configurable through * the { @ link N1qlParams } ) can provide better performance even when the actual arguments change * at execution time . * @ param statement the { @ link Statement } to execute ( containing named placeholders ) * @ param namedParams the values for the named placeholders in statement * @ param params the { @ link N1qlParams query parameters } . */ public static ParameterizedN1qlQuery parameterized ( Statement statement , JsonObject namedParams , N1qlParams params ) { } }
return new ParameterizedN1qlQuery ( statement , namedParams , params ) ;
public class QueryControllerTreeModel { /** * Search a query node in a group and returns the object else returns null . */ public QueryTreeElement getElementQuery ( String element , String group ) { } }
QueryTreeElement node = null ; Enumeration < TreeNode > elements = root . children ( ) ; while ( elements . hasMoreElements ( ) ) { TreeElement currentNode = ( TreeElement ) elements . nextElement ( ) ; if ( currentNode instanceof QueryGroupTreeElement && currentNode . getID ( ) . equals ( group ) ) { QueryGroupTreeElement groupTElement = ( QueryGroupTreeElement ) currentNode ; Enumeration < TreeNode > queries = groupTElement . children ( ) ; while ( queries . hasMoreElements ( ) ) { QueryTreeElement queryTElement = ( QueryTreeElement ) queries . nextElement ( ) ; if ( queryTElement . getID ( ) . equals ( element ) ) { node = queryTElement ; break ; } } } else { continue ; } } return node ;
public class VerificationConditionGenerator {
    /**
     * Transform a function or method declaration into verification conditions as
     * necessary. This is done by traversing the control-flow graph of the function
     * or method in question. Verifications are emitted when conditions are
     * encountered which must be checked. For example, that the preconditions are
     * met at a function invocation.
     *
     * @param declaration The function or method declaration being translated.
     */
    private void translateFunctionOrMethodDeclaration(WyilFile.Decl.FunctionOrMethod declaration) {
        // Create the prototype for this function or method. This is the
        // function or method declaration which can be used within verification
        // conditions to refer to this function or method. This does not include
        // a body, since function or methods are treated as being
        // "uninterpreted" for the purposes of verification.
        createFunctionOrMethodPrototype(declaration);
        // Create macros representing the individual clauses of the function or
        // method's precondition and postcondition. These macros can then be
        // called either to assume the precondition/postcondition or to check
        // them. Using individual clauses helps to provide better error
        // messages.
        translatePreconditionMacros(declaration);
        translatePostconditionMacros(declaration);
        // The environments are needed to prevent clashes between variable
        // versions across verification conditions, and also to type variables
        // used in verification conditions.
        GlobalEnvironment globalEnvironment = new GlobalEnvironment(declaration);
        LocalEnvironment localEnvironment = new LocalEnvironment(globalEnvironment);
        // Generate the initial assumption set for a given function or method,
        // which roughly corresponds to its precondition.
        AssumptionSet assumptions = generateFunctionOrMethodAssumptionSet(declaration, localEnvironment);
        // Generate verification conditions by propagating forwards through the
        // control-flow graph of the function or method in question. For each
        // statement encountered, generate the preconditions which must hold
        // true at that point. Furthermore, generate the effect of this
        // statement on the current state.
        List<VerificationCondition> vcs = new ArrayList<>();
        Context context = new Context(wyalFile, assumptions, localEnvironment, localEnvironment, null, vcs);
        translateStatementBlock(declaration.getBody(), context);
        // Translate each generated verification condition into an assertion in
        // the underlying WyalFile.
        createAssertions(declaration, vcs, globalEnvironment);
    }
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcDoorStyle ( ) { } }
if ( ifcDoorStyleEClass == null ) { ifcDoorStyleEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 196 ) ; } return ifcDoorStyleEClass ;
public class TableAppender {
    /**
     * Appends the table's XML to content.xml in three strictly ordered parts:
     * preamble (table open tags/attributes), rows, and postamble (closing tags).
     *
     * @param util an util
     * @param appendable the output
     * @throws IOException if the XML could not be written
     */
    public void appendXMLToContentEntry(final XMLUtil util, final Appendable appendable) throws IOException {
        // Order matters: the preamble opens the elements that the postamble closes.
        this.appendPreamble(util, appendable);
        this.appendRows(util, appendable);
        this.appendPostamble(appendable);
    }
}
public class HttpUtils { /** * Execute post http response . * @ param url the url * @ param basicAuthUsername the basic auth username * @ param basicAuthPassword the basic auth password * @ param entity the entity * @ param parameters the parameters * @ param headers the headers * @ return the http response */ public static HttpResponse executePost ( final String url , final String basicAuthUsername , final String basicAuthPassword , final String entity , final Map < String , Object > parameters , final Map < String , Object > headers ) { } }
try { return execute ( url , HttpMethod . POST . name ( ) , basicAuthUsername , basicAuthPassword , parameters , headers , entity ) ; } catch ( final Exception e ) { LOGGER . error ( e . getMessage ( ) , e ) ; } return null ;
public class SmsRadar { /** * Stops the service and remove the SmsListener added when the SmsRadar was initialized * @ param context used to stop the service */ public static void stopSmsRadarService ( Context context ) { } }
SmsRadar . smsListener = null ; Intent intent = new Intent ( context , SmsRadarService . class ) ; context . stopService ( intent ) ;
public class BaseTagGenerator {
    /**
     * This method is called as part of the custom tag's end element.
     * If the given custom tag has a custom nesting level greater than 0,
     * restore its scripting variables to their original values that were
     * saved in the tag's start element.
     *
     * @param tagWriter writer the restore statements are emitted to
     * @param scope only variables declared with this scope are restored
     */
    protected void restoreScriptingVars(JavaCodeWriter tagWriter, int scope) {
        // Nothing was saved at nesting level 0, so nothing to restore.
        if (nestingLevel == 0) {
            return;
        }
        TagVariableInfo[] tagVarInfos = ti.getTagVariableInfos();
        VariableInfo[] varInfos = ti.getVariableInfo(collectedTagData.getTagData());
        if (varInfos == null)
            varInfos = new VariableInfo[0];
        if ((varInfos.length == 0) && (tagVarInfos.length == 0)) {
            return;
        }
        // VariableInfo entries take precedence; TagVariableInfo is only used when
        // no VariableInfo entries exist.
        if (varInfos.length > 0) {
            for (int i = 0; i < varInfos.length; i++) {
                if (varInfos[i].getScope() != scope)
                    continue;
                // Skip variables this tag itself declared (they were not saved).
                if (containsVariableInfo(getScriptingVars(scope), varInfos[i])) {
                    continue;
                }
                String varName = varInfos[i].getVarName();
                // Saved copies use the "_jspx_<name>_<level>" naming convention.
                String tmpVarName = "_jspx_" + varName + "_" + nestingLevel;
                tagWriter.print(varName);
                tagWriter.print(" = ");
                tagWriter.print(tmpVarName);
                tagWriter.println(";");
            }
        } else {
            for (int i = 0; i < tagVarInfos.length; i++) {
                if (tagVarInfos[i].getScope() != scope)
                    continue;
                if (containsTagVariableInfo(getScriptingVars(scope), tagVarInfos[i])) {
                    continue;
                }
                String varName = tagVarInfos[i].getNameGiven();
                if (varName == null) {
                    // Name comes indirectly from a tag attribute value.
                    varName = collectedTagData.getTagData().getAttributeString(tagVarInfos[i].getNameFromAttribute());
                } else if (tagVarInfos[i].getNameFromAttribute() != null) {
                    // alias
                    continue;
                }
                String tmpVarName = "_jspx_" + varName + "_" + nestingLevel;
                tagWriter.print(varName);
                tagWriter.print(" = ");
                tagWriter.print(tmpVarName);
                tagWriter.println(";");
            }
        }
    }
}
public class JSONValue { /** * Parse valid RFC4627 JSON text into java object from the input source . * @ see JSONParser * @ return Instance of the following : JSONObject , JSONArray , String , * java . lang . Number , java . lang . Boolean , null */ public static Object parseStrict ( String s ) throws ParseException { } }
return new JSONParser ( MODE_RFC4627 ) . parse ( s , defaultReader . DEFAULT ) ;
public class QueryRunner { /** * Execute a batch of SQL INSERT , UPDATE , or DELETE queries . * @ param conn The Connection to use to run the query . The caller is * responsible for closing this Connection . * @ param sql The SQL to execute . * @ param params An array of query replacement parameters . Each row in * this array is one set of batch replacement values . * @ param paramTypes Query replacement parameters types ; < code > null < / code > is a valid * value to pass in . * @ return The number of rows updated per statement . * @ throws java . sql . SQLException if a database access error occurs */ public int [ ] batch ( Connection conn , String sql , Object [ ] [ ] params , int [ ] paramTypes ) throws SQLException { } }
PreparedStatement stmt = null ; int [ ] rows = null ; try { stmt = this . prepareStatement ( conn , sql ) ; for ( int i = 0 ; i < params . length ; i ++ ) { this . fillStatement ( stmt , params [ i ] , paramTypes ) ; stmt . addBatch ( ) ; } rows = stmt . executeBatch ( ) ; } catch ( SQLException e ) { this . rethrow ( e , sql , params ) ; } finally { close ( stmt ) ; } return rows ;
public class RawPaginator { /** * This method will return a list of records for a specific page . * @ param pageNumber page number to return . This is indexed at 1 , not 0 . Any value below 1 is illegal and will * be rejected . * @ return list of records that match a query make up a " page " . */ @ SuppressWarnings ( "unchecked" ) public List < Map > getPage ( int pageNumber ) { } }
if ( pageNumber < 1 ) { throw new IllegalArgumentException ( "minimum page index == 1" ) ; } String select = subQuery == null ? dialect . formSelect ( tableName , columns , null , orderBys , pageSize , ( pageNumber - 1 ) * pageSize ) : dialect . formSelect ( tableName , columns , subQuery , orderBys , pageSize , ( pageNumber - 1 ) * pageSize ) ; currentPage = pageNumber ; return new DB ( dbName ) . findAll ( select , params ) ;
public class CQContainmentCheckUnderLIDs {
    /**
     * Extends a given substitution so that each atom in {@code from} matches at least
     * one atom in {@code to}, using an iterative backtracking search over candidate
     * target atoms per predicate symbol.
     *
     * @param sb the substitution builder holding the (initially empty) partial homomorphism
     * @param from atoms that must all be matched
     * @param to candidate target atoms, indexed by predicate
     * @return a completed substitution, or {@code null} when no homomorphism exists
     */
    private static Substitution computeSomeHomomorphism(SubstitutionBuilder sb, List<Function> from, Map<Predicate, List<Function>> to) {
        int fromSize = from.size();
        // Empty source: the current (possibly empty) substitution already works.
        if (fromSize == 0)
            return sb.getSubstituition();
        // stack of partial homomorphisms
        Stack<SubstitutionBuilder> sbStack = new Stack<>();
        sbStack.push(sb);
        // set the capacity to reduce memory re-allocations
        List<Stack<Function>> choicesMap = new ArrayList<>(fromSize);
        int currentAtomIdx = 0;
        // currentAtomIdx < 0 means we backtracked past the first atom: search exhausted.
        while (currentAtomIdx >= 0) {
            Function currentAtom = from.get(currentAtomIdx);
            Stack<Function> choices;
            if (currentAtomIdx >= choicesMap.size()) {
                // we have never reached this atom (this is lazy initialization)
                // initializing the stack
                choices = new Stack<>();
                // add all choices for the current predicate symbol
                choices.addAll(to.get(currentAtom.getFunctionSymbol()));
                choicesMap.add(currentAtomIdx, choices);
            } else
                choices = choicesMap.get(currentAtomIdx);
            boolean choiceMade = false;
            while (!choices.isEmpty()) {
                SubstitutionBuilder sb1 = sb.clone(); // clone!
                choiceMade = extendHomomorphism(sb1, currentAtom, choices.pop());
                if (choiceMade) {
                    // we reached the last atom
                    if (currentAtomIdx == fromSize - 1)
                        return sb1.getSubstituition();
                    // otherwise, save the partial homomorphism
                    sbStack.push(sb);
                    sb = sb1;
                    currentAtomIdx++; // move to the next atom
                    break;
                }
            }
            if (!choiceMade) {
                // backtracking
                // restore all choices for the current predicate symbol
                choices.addAll(to.get(currentAtom.getFunctionSymbol()));
                sb = sbStack.pop(); // restore the partial homomorphism
                currentAtomIdx--; // move to the previous atom
            }
        }
        // checked all possible substitutions and have not found anything
        return null;
    }
}
public class ResolutionQueryPlan {
    /**
     * Computes the query resolution plan — a list of queries ordered by their cost as
     * computed by the graql traversal planner. Consecutive non-rule-resolvable atoms are
     * grouped into one query; each rule-resolvable atom becomes its own atomic query.
     *
     * @return list of prioritised queries
     */
    private static ImmutableList<ReasonerQueryImpl> queryPlan(ReasonerQueryImpl query) {
        ResolutionPlan resolutionPlan = query.resolutionPlan();
        ImmutableList<Atom> plan = resolutionPlan.plan();
        TransactionOLTP tx = query.tx();
        LinkedList<Atom> atoms = new LinkedList<>(plan);
        List<ReasonerQueryImpl> queries = new LinkedList<>();
        // Accumulates consecutive non-resolvable atoms until a resolvable one is hit.
        List<Atom> nonResolvableAtoms = new ArrayList<>();
        while (!atoms.isEmpty()) {
            Atom top = atoms.remove();
            if (top.isRuleResolvable()) {
                // Flush the accumulated group as one query before the atomic one.
                if (!nonResolvableAtoms.isEmpty()) {
                    queries.add(ReasonerQueries.create(nonResolvableAtoms, tx));
                    nonResolvableAtoms.clear();
                }
                queries.add(ReasonerQueries.atomic(top));
            } else {
                nonResolvableAtoms.add(top);
                // Flush the trailing group when the plan has been consumed.
                if (atoms.isEmpty()) queries.add(ReasonerQueries.create(nonResolvableAtoms, tx));
            }
        }
        // Refinement only pays off when atoms were actually grouped (counts differ)
        // and the query imposes no schema constraints.
        boolean refine = plan.size() != queries.size() && !query.requiresSchema();
        return refine ? refine(queries) : ImmutableList.copyOf(queries);
    }
}
public class Maestrano { /** * return the Maestrano Instance for the given preset * @ param marketplace * @ return * @ throws MnoConfigurationException * if no instance was not configured */ public static Preset get ( String marketplace ) throws MnoConfigurationException { } }
Preset maestrano = configurations . get ( marketplace ) ; if ( maestrano == null ) { throw new MnoConfigurationException ( "Maestrano was not configured for marketplace: " + marketplace + ". Maestrano.configure(" + marketplace + ") needs to have been called once." ) ; } return maestrano ;
public class ExpressionNode { /** * OGNL式オブジェクトを取得する * @ param expression 評価式 * @ return OGNL式オブジェクト */ protected Object getParsedExpression ( final String expression ) { } }
try { return Ognl . parseExpression ( expression ) ; } catch ( OgnlException ex ) { throw new OgnlRuntimeException ( "Failed to parse the expression.[" + expression + "]" , ex ) ; }
public class LinkedWorkspaceStorageCacheImpl {
    /**
     * {@inheritDoc}
     *
     * Cancels the background worker timer (best-effort; failures are only logged)
     * and then clears all three internal caches.
     */
    public void stop() {
        // Cancel the worker first so it cannot repopulate the caches cleared below.
        if (workerTimer != null) {
            try {
                workerTimer.cancel();
            } catch (Throwable e) // NOSONAR
            {
                // Best-effort shutdown: a failing cancel must not abort stop().
                LOG.warn(this.name + " cache, stop error " + e.getMessage());
            }
        }
        nodesCache.clear();
        propertiesCache.clear();
        cache.clear();
    }
}
public class StaticCodeBook {
    /**
     * Unpacks the quantized value list into an explicit per-entry value vector.
     * Both value lists (maptype 1 and 2) must be unpacked; any other maptype
     * yields {@code null}.
     */
    float[] unquantize() {
        if (maptype == 1 || maptype == 2) {
            int quantvals;
            // q_min / q_delta are stored in the packed 32-bit float format.
            float mindel = float32_unpack(q_min);
            float delta = float32_unpack(q_delta);
            float[] r = new float[entries * dim];
            // maptype 1 and 2 both use a quantized value vector, but
            // different sizes
            switch (maptype) {
            case 1:
                // most of the time, entries % dimensions == 0, but we need to be
                // well defined. We define that the possible vales at each
                // scalar is values == entries / dim. If entries % dim != 0, we'll
                // have 'too few' values (values * dim < entries), which means that
                // we'll have 'left over' entries; left over entries use zeroed
                // values (and are wasted). So don't generate codebooks like that
                quantvals = maptype1_quantvals();
                for (int j = 0; j < entries; j++) {
                    float last = 0.f;
                    int indexdiv = 1;
                    for (int k = 0; k < dim; k++) {
                        // Decode the k-th digit of entry j in base `quantvals`.
                        int index = (j / indexdiv) % quantvals;
                        float val = quantlist[index];
                        val = Math.abs(val) * delta + mindel + last;
                        // Sequence flag: each scalar accumulates onto the previous one.
                        if (q_sequencep != 0)
                            last = val;
                        r[j * dim + k] = val;
                        indexdiv *= quantvals;
                    }
                }
                break;
            case 2:
                // maptype 2 stores one quantized value per entry scalar directly.
                for (int j = 0; j < entries; j++) {
                    float last = 0.f;
                    for (int k = 0; k < dim; k++) {
                        float val = quantlist[j * dim + k];
                        // if((j*dim+k)==0){System.err.println(" | 0 ->"+val+" | ");}
                        val = Math.abs(val) * delta + mindel + last;
                        if (q_sequencep != 0)
                            last = val;
                        r[j * dim + k] = val;
                        // if((j*dim+k)==0){System.err.println(" $ r[0] -> "+r[0]+" | ");}
                    }
                }
                // System.err.println("\nr[0]="+r[0]);
            }
            return (r);
        }
        return (null);
    }
}