signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CommerceOrderItemPersistenceImpl { /** * Returns the first commerce order item in the ordered set where CPInstanceId = & # 63 ; . * @ param CPInstanceId the cp instance ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce order item * @ throws NoSuchOrderItemException if a matching commerce order item could not be found */ @ Override public CommerceOrderItem findByCPInstanceId_First ( long CPInstanceId , OrderByComparator < CommerceOrderItem > orderByComparator ) throws NoSuchOrderItemException { } }
CommerceOrderItem commerceOrderItem = fetchByCPInstanceId_First ( CPInstanceId , orderByComparator ) ; if ( commerceOrderItem != null ) { return commerceOrderItem ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPInstanceId=" ) ; msg . append ( CPInstanceId ) ; msg . append ( "}" ) ; throw new NoSuchOrderItemException ( msg . toString ( ) ) ;
public class TranslateExprNodeVisitor { /** * Implementations for operators . */ @ Override protected Expression visitNullCoalescingOpNode ( NullCoalescingOpNode node ) { } }
List < Expression > operands = visitChildren ( node ) ; Expression consequent = operands . get ( 0 ) ; Expression alternate = operands . get ( 1 ) ; // if the consequent isn ' t trivial we should store the intermediate result in a new temporary if ( ! consequent . isCheap ( ) ) { consequent = codeGenerator . declarationBuilder ( ) . setRhs ( consequent ) . build ( ) . ref ( ) ; } return Expression . ifExpression ( consequent . doubleNotEquals ( Expression . LITERAL_NULL ) , consequent ) . setElse ( alternate ) . build ( codeGenerator ) ;
public class SNISSLExplorer { /** * struct { * uint32 gmt _ unix _ time ; * opaque random _ bytes [ 28 ] ; * } Random ; * opaque SessionID < 0 . . 32 > ; * uint8 CipherSuite [ 2 ] ; * enum { null ( 0 ) , ( 255 ) } CompressionMethod ; * struct { * ProtocolVersion client _ version ; * Random random ; * SessionID session _ id ; * CipherSuite cipher _ suites < 2 . . 2 ^ 16-2 > ; * CompressionMethod compression _ methods < 1 . . 2 ^ 8-1 > ; * select ( extensions _ present ) { * case false : * struct { } ; * case true : * Extension extensions < 0 . . 2 ^ 16-1 > ; * } ClientHello ; */ private static List < SNIServerName > exploreClientHello ( ByteBuffer input , byte recordMajorVersion , byte recordMinorVersion ) throws SSLException { } }
ExtensionInfo info = null ; // client version input . get ( ) ; // helloMajorVersion input . get ( ) ; // helloMinorVersion // ignore random int position = input . position ( ) ; input . position ( position + 32 ) ; // 32 : the length of Random // ignore session id ignoreByteVector8 ( input ) ; // ignore cipher _ suites int csLen = getInt16 ( input ) ; while ( csLen > 0 ) { getInt8 ( input ) ; getInt8 ( input ) ; csLen -= 2 ; } // ignore compression methods ignoreByteVector8 ( input ) ; if ( input . remaining ( ) > 0 ) { info = exploreExtensions ( input ) ; } final List < SNIServerName > snList = info != null ? info . sni : Collections . emptyList ( ) ; return snList ;
public class JMonthChooser { /** * The ItemListener for the months . * @ param e * the item event */ public void itemStateChanged ( ItemEvent e ) { } }
if ( e . getStateChange ( ) == ItemEvent . SELECTED ) { int index = comboBox . getSelectedIndex ( ) ; if ( ( index >= 0 ) && ( index != month ) ) { setMonth ( index , false ) ; } }
public class Expectation {
    /**
     * Returns a consumer that throws an {@link AssertionError} based on the
     * given object.
     *
     * @param format the format to apply to the message
     *               (see {@link String#format(String, Object...)}); the consumed
     *               object is the single format argument
     * @return a consumer object
     * @since 3.2
     */
    public static Consumer throwAssertionError(String format) {
        return object -> {
            String message = String.format(format, object);
            throw new AssertionError(message);
        };
    }
}
public class OriginationUrl { /** * Create a OriginationUrlCreator to execute create . * @ param pathTrunkSid The SID of the Trunk to associate the resource with * @ param weight The value that determines the relative load the URI should * receive compared to others with the same priority * @ param priority The relative importance of the URI * @ param enabled Whether the URL is enabled * @ param friendlyName A string to describe the resource * @ param sipUrl The SIP address you want Twilio to route your Origination * calls to * @ return OriginationUrlCreator capable of executing the create */ public static OriginationUrlCreator creator ( final String pathTrunkSid , final Integer weight , final Integer priority , final Boolean enabled , final String friendlyName , final URI sipUrl ) { } }
return new OriginationUrlCreator ( pathTrunkSid , weight , priority , enabled , friendlyName , sipUrl ) ;
public class TrifocalTensor { /** * Converts this matrix formated trifocal into a 27 element vector : < br > * m . data [ i * 9 + j * 3 + k ] = T _ i ( j , k ) * @ param m Output : Trifocal tensor encoded in a vector */ public void convertTo ( DMatrixRMaj m ) { } }
if ( m . getNumElements ( ) != 27 ) throw new IllegalArgumentException ( "Input matrix/vector must have 27 elements" ) ; for ( int i = 0 ; i < 9 ; i ++ ) { m . data [ i ] = T1 . data [ i ] ; m . data [ i + 9 ] = T2 . data [ i ] ; m . data [ i + 18 ] = T3 . data [ i ] ; }
public class CustomerSegmentUrl { /** * Get Resource Url for GetSegment * @ param id Unique identifier of the customer segment to retrieve . * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ return String Resource Url */ public static MozuUrl getSegmentUrl ( Integer id , String responseFields ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/customer/segments/{id}?responseFields={responseFields}" ) ; formatter . formatUrl ( "id" , id ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class DateFormatSymbols { /** * Returns a cached DateFormatSymbols if it ' s found in the * cache . Otherwise , this method returns a newly cached instance * for the given locale . */ private static DateFormatSymbols getCachedInstance ( Locale locale ) { } }
SoftReference < DateFormatSymbols > ref = cachedInstances . get ( locale ) ; DateFormatSymbols dfs = null ; if ( ref == null || ( dfs = ref . get ( ) ) == null ) { dfs = new DateFormatSymbols ( locale ) ; ref = new SoftReference < DateFormatSymbols > ( dfs ) ; SoftReference < DateFormatSymbols > x = cachedInstances . putIfAbsent ( locale , ref ) ; if ( x != null ) { DateFormatSymbols y = x . get ( ) ; if ( y != null ) { dfs = y ; } else { // Replace the empty SoftReference with ref . cachedInstances . put ( locale , ref ) ; } } } return dfs ;
public class AbstractCommand { /** * Search for a button representing this command in the provided container * and let it request the focus . * @ param container the container which holds the command button . * @ return < code > true < / code > if the focus request is likely to succeed . * @ see # getButtonIn ( Container ) * @ see JComponent # requestFocusInWindow ( ) */ public boolean requestFocusIn ( Container container ) { } }
AbstractButton button = getButtonIn ( container ) ; if ( button != null ) { return button . requestFocusInWindow ( ) ; } return false ;
public class GVRShadowMap { /** * Sets the direct light shadow matrix for the light from the input model / view * matrix and the shadow camera projection matrix . * @ param modelMtx light model transform ( to world coordinates ) * @ param light direct light component to update */ void setOrthoShadowMatrix ( Matrix4f modelMtx , GVRLight light ) { } }
GVROrthogonalCamera camera = ( GVROrthogonalCamera ) getCamera ( ) ; if ( camera == null ) { return ; } float w = camera . getRightClippingDistance ( ) - camera . getLeftClippingDistance ( ) ; float h = camera . getTopClippingDistance ( ) - camera . getBottomClippingDistance ( ) ; float near = camera . getNearClippingDistance ( ) ; float far = camera . getFarClippingDistance ( ) ; modelMtx . invert ( ) ; modelMtx . get ( mTempMtx ) ; camera . setViewMatrix ( mTempMtx ) ; mShadowMatrix . setOrthoSymmetric ( w , h , near , far ) ; mShadowMatrix . mul ( modelMtx ) ; sBiasMatrix . mul ( mShadowMatrix , mShadowMatrix ) ; mShadowMatrix . getColumn ( 0 , mTemp ) ; light . setVec4 ( "sm0" , mTemp . x , mTemp . y , mTemp . z , mTemp . w ) ; mShadowMatrix . getColumn ( 1 , mTemp ) ; light . setVec4 ( "sm1" , mTemp . x , mTemp . y , mTemp . z , mTemp . w ) ; mShadowMatrix . getColumn ( 2 , mTemp ) ; light . setVec4 ( "sm2" , mTemp . x , mTemp . y , mTemp . z , mTemp . w ) ; mShadowMatrix . getColumn ( 3 , mTemp ) ; light . setVec4 ( "sm3" , mTemp . x , mTemp . y , mTemp . z , mTemp . w ) ;
public class ProtoLexer { /** * $ ANTLR start " SINT32" */ public final void mSINT32 ( ) throws RecognitionException { } }
try { int _type = SINT32 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // com / dyuproject / protostuff / parser / ProtoLexer . g : 171:5 : ( ' sint32 ' ) // com / dyuproject / protostuff / parser / ProtoLexer . g : 171:9 : ' sint32' { match ( "sint32" ) ; } state . type = _type ; state . channel = _channel ; } finally { }
public class ClusterEVCManager { /** * Set the EVC mode . If EVC is currently disabled , then this will enable EVC . * The parameter must specify a key to one of the EVC modes listed in the supportedEVCMode * array property . If there are no modes listed there , then EVC may not currently be enabled ; * reference the other properties in EVCState to determine what conditions are blocking EVC . * @ param evcModeKey A key referencing the desired EVC mode . * @ return Task with which to monitor the operation * @ throws RuntimeFault * @ throws RemoteException */ public Task configureEvcMode_Task ( String evcModeKey ) throws RuntimeFault , RemoteException , EVCConfigFault { } }
ManagedObjectReference task = getVimService ( ) . configureEvcMode_Task ( getMOR ( ) , evcModeKey ) ; return new Task ( getServerConnection ( ) , task ) ;
public class Util { /** * Reads contents of resource fully into a byte array . * @ param resourceName resource name . * @ return entire contents of resource as byte array . */ public static byte [ ] readResourceBytes ( String resourceName ) { } }
InputStream is = Util . class . getResourceAsStream ( resourceName ) ; try { return bytes ( is ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } finally { closeQuietly ( is ) ; }
public class StopJobRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( StopJobRequest stopJobRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( stopJobRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( stopJobRequest . getArn ( ) , ARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Mutations { /** * Extracts mutations for a range of positions in the original sequence and performs shift of corresponding * positions ( moves them to { @ code - range . from } ) . < p / > < p > Insertions before { @ code range . from } excluded . Insertions * after { @ code ( range . to - 1 ) } included . < / p > < p / > < p > < b > Important : < / b > to extract leftmost insertions ( trailing * insertions ) use { @ code range . from = - 1 } . < / p > * @ param range range * @ return mutations for a range of positions */ public Mutations < S > extractRelativeMutationsForRange ( Range range ) { } }
if ( range . isReverse ( ) ) throw new IllegalArgumentException ( "Reverse ranges are not supported by this method." ) ; return extractRelativeMutationsForRange ( range . getFrom ( ) , range . getTo ( ) ) ;
public class JCuda { /** * Query an attribute of a given memory range . < br > * < br > * Query an attribute about the memory range starting at devPtr with a size * of count bytes . The memory range must refer to managed memory allocated * via cudaMallocManaged or declared via _ _ managed _ _ variables . < br > * < br > * The attribute parameter can take the following values : < br > * < br > * < ul > * < li > * cudaMemRangeAttributeReadMostly : If this attribute is specified , data * will be interpreted as a 32 - bit integer , and dataSize must be 4 . The * result returned will be 1 if all pages in the given memory range have * read - duplication enabled , or 0 otherwise . * < / li > * < li > * cudaMemRangeAttributePreferredLocation : If this attribute is specified , * data will be interpreted as a 32 - bit integer , and dataSize must be 4 . The * result returned will be a GPU device id if all pages in the memory range * have that GPU as their preferred location , or it will be cudaCpuDeviceId * if all pages in the memory range have the CPU as their preferred * location , or it will be cudaInvalidDeviceId if either all the pages don ' t * have the same preferred location or some of the pages don ' t have a * preferred location at all . Note that the actual location of the pages in * the memory range at the time of the query may be different from the * preferred location . * < / li > * < li > * cudaMemRangeAttributeAccessedBy : If this attribute is specified , data * will be interpreted as an array of 32 - bit integers , and dataSize must be * a non - zero multiple of 4 . The result returned will be a list of device * ids that had cudaMemAdviceSetAccessedBy set for that entire memory range . * If any device does not have that advice set for the entire memory range , * that device will not be included . 
If data is larger than the number of * devices that have that advice set for that memory range , * cudaInvalidDeviceId will be returned in all the extra space provided . For * ex . , if dataSize is 12 ( i . e . data has 3 elements ) and only device 0 has * the advice set , then the result returned will be { 0, * cudaInvalidDeviceId , cudaInvalidDeviceId } . If data is smaller than the * number of devices that have that advice set , then only as many devices * will be returned as can fit in the array . There is no guarantee on which * specific devices will be returned , however . * < / li > * < li > * cudaMemRangeAttributeLastPrefetchLocation : If this attribute is * specified , data will be interpreted as a 32 - bit integer , and dataSize * must be 4 . The result returned will be the last location to which all * pages in the memory range were prefetched explicitly via * cudaMemPrefetchAsync . This will either be a GPU id or cudaCpuDeviceId * depending on whether the last location for prefetch was a GPU or the CPU * respectively . If any page in the memory range was never explicitly * prefetched or if all pages were not prefetched to the same location , * cudaInvalidDeviceId will be returned . Note that this simply returns the * last location that the applicaton requested to prefetch the memory range * to . It gives no indication as to whether the prefetch operation to that * location has completed or even begun . * < / li > * < / ul > * @ param data A pointers to a memory location where the result of each * attribute query will be written to . 
* @ param dataSize Array containing the size of data * @ param attribute The { @ link cudaMemRangeAttribute } to query * @ param devPtr Start of the range to query * @ param count Size of the range to query * @ return cudaSuccess , cudaErrorInvalidValue * @ see JCuda # cudaMemRangeGetAttributes * @ see JCuda # cudaMemPrefetchAsync * @ see JCuda # cudaMemAdvise */ public static int cudaMemRangeGetAttribute ( Pointer data , long dataSize , int attribute , Pointer devPtr , long count ) { } }
return checkResult ( cudaMemRangeGetAttributeNative ( data , dataSize , attribute , devPtr , count ) ) ;
public class BytecodeUtils { /** * Returns an { @ link Expression } with the given type that always returns null . */ public static Expression constantNull ( Type type ) { } }
checkArgument ( type . getSort ( ) == Type . OBJECT || type . getSort ( ) == Type . ARRAY , "%s is not a reference type" , type ) ; return new Expression ( type , Feature . CHEAP ) { @ Override protected void doGen ( CodeBuilder mv ) { mv . visitInsn ( Opcodes . ACONST_NULL ) ; } } ;
public class ListThingRegistrationTaskReportsResult { /** * Links to the task resources . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setResourceLinks ( java . util . Collection ) } or { @ link # withResourceLinks ( java . util . Collection ) } if you want * to override the existing values . * @ param resourceLinks * Links to the task resources . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListThingRegistrationTaskReportsResult withResourceLinks ( String ... resourceLinks ) { } }
if ( this . resourceLinks == null ) { setResourceLinks ( new java . util . ArrayList < String > ( resourceLinks . length ) ) ; } for ( String ele : resourceLinks ) { this . resourceLinks . add ( ele ) ; } return this ;
public class ApiOvhCloud { /** * Alter this object properties * REST : PUT / cloud / project / { serviceName } / alerting / { id } * @ param body [ required ] New object properties * @ param serviceName [ required ] The project id * @ param id [ required ] Alerting unique UUID */ public void project_serviceName_alerting_id_PUT ( String serviceName , String id , OvhAlerting body ) throws IOException { } }
String qPath = "/cloud/project/{serviceName}/alerting/{id}" ; StringBuilder sb = path ( qPath , serviceName , id ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class StreamMetadataTasks { /** * Update stream ' s configuration . * @ param scope scope . * @ param stream stream name . * @ param newConfig modified stream configuration . * @ param contextOpt optional context * @ return update status . */ public CompletableFuture < UpdateStreamStatus . Status > updateStream ( String scope , String stream , StreamConfiguration newConfig , OperationContext contextOpt ) { } }
final OperationContext context = contextOpt == null ? streamMetadataStore . createContext ( scope , stream ) : contextOpt ; final long requestId = requestTracker . getRequestIdFor ( "updateStream" , scope , stream ) ; // 1 . get configuration return streamMetadataStore . getConfigurationRecord ( scope , stream , context , executor ) . thenCompose ( configProperty -> { // 2 . post event to start update workflow if ( ! configProperty . getObject ( ) . isUpdating ( ) ) { return addIndexAndSubmitTask ( new UpdateStreamEvent ( scope , stream , requestId ) , // 3 . update new configuration in the store with updating flag = true // if attempt to update fails , we bail out with no harm done ( ) -> streamMetadataStore . startUpdateConfiguration ( scope , stream , newConfig , context , executor ) ) // 4 . wait for update to complete . thenCompose ( x -> checkDone ( ( ) -> isUpdated ( scope , stream , newConfig , context ) ) . thenApply ( y -> UpdateStreamStatus . Status . SUCCESS ) ) ; } else { log . warn ( requestId , "Another update in progress for {}/{}" , scope , stream ) ; return CompletableFuture . completedFuture ( UpdateStreamStatus . Status . FAILURE ) ; } } ) . exceptionally ( ex -> { log . warn ( requestId , "Exception thrown in trying to update stream configuration {}" , ex . getMessage ( ) ) ; return handleUpdateStreamError ( ex , requestId ) ; } ) ;
public class EmbeddedNeo4jEntityQueries { /** * When the id is mapped on several properties */ private ResourceIterator < Node > multiPropertiesIdFindEntities ( GraphDatabaseService executionEngine , EntityKey [ ] keys ) { } }
String query = getMultiGetQueryCacheQuery ( keys ) ; Map < String , Object > params = multiGetParams ( keys ) ; Result result = executionEngine . execute ( query , params ) ; return result . columnAs ( ENTITY_ALIAS ) ;
public class StreamUtil {
    /**
     * Copies the contents of an <code>InputStream</code> to an
     * <code>OutputStream</code>.
     *
     * @param in The <code>InputStream</code> to read from.
     * @param out The <code>OutputStream</code> to write to.
     * @throws IOException If unable to read from <code>in</code> or write to
     *         <code>out</code>.
     */
    public static void writeStream(InputStream in, OutputStream out) throws IOException {
        // FIX: the old implementation copied one byte per read()/write() call,
        // which is extremely slow on unbuffered streams. Copy through a buffer
        // instead; the bytes transferred are identical.
        byte[] buffer = new byte[8192];
        int read;
        while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
    }
}
public class AmazonRoute53DomainsClient { /** * This operation returns the current status of an operation that is not completed . * @ param getOperationDetailRequest * The < a > GetOperationDetail < / a > request includes the following element . * @ return Result of the GetOperationDetail operation returned by the service . * @ throws InvalidInputException * The requested item is not acceptable . For example , for an OperationId it might refer to the ID of an * operation that is already completed . For a domain name , it might not be a valid domain name or belong to * the requester account . * @ sample AmazonRoute53Domains . GetOperationDetail * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53domains - 2014-05-15 / GetOperationDetail " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetOperationDetailResult getOperationDetail ( GetOperationDetailRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetOperationDetail ( request ) ;
public class FrameOutputWriter { /** * Get the frame sizes and their contents . * @ return a content tree for the frame details */ protected Content getFrameDetails ( ) { } }
HtmlTree leftContainerDiv = new HtmlTree ( HtmlTag . DIV ) ; HtmlTree rightContainerDiv = new HtmlTree ( HtmlTag . DIV ) ; leftContainerDiv . addStyle ( HtmlStyle . leftContainer ) ; rightContainerDiv . addStyle ( HtmlStyle . rightContainer ) ; if ( noOfPackages <= 1 ) { addAllClassesFrameTag ( leftContainerDiv ) ; } else if ( noOfPackages > 1 ) { addAllPackagesFrameTag ( leftContainerDiv ) ; addAllClassesFrameTag ( leftContainerDiv ) ; } addClassFrameTag ( rightContainerDiv ) ; HtmlTree mainContainer = HtmlTree . DIV ( HtmlStyle . mainContainer , leftContainerDiv ) ; mainContainer . addContent ( rightContainerDiv ) ; return mainContainer ;
public class CanalServiceImpl { /** * 用于DO对象转化为Model对象 * @ param canalDo * @ return Canal */ private Canal doToModel ( CanalDO canalDo ) { } }
Canal canal = new Canal ( ) ; try { canal . setId ( canalDo . getId ( ) ) ; canal . setName ( canalDo . getName ( ) ) ; canal . setStatus ( canalDo . getStatus ( ) ) ; canal . setDesc ( canalDo . getDescription ( ) ) ; CanalParameter parameter = canalDo . getParameters ( ) ; AutoKeeperCluster zkCluster = autoKeeperClusterService . findAutoKeeperClusterById ( parameter . getZkClusterId ( ) ) ; if ( zkCluster != null ) { parameter . setZkClusters ( Arrays . asList ( StringUtils . join ( zkCluster . getServerList ( ) , ',' ) ) ) ; } canal . setCanalParameter ( canalDo . getParameters ( ) ) ; canal . setGmtCreate ( canalDo . getGmtCreate ( ) ) ; canal . setGmtModified ( canalDo . getGmtModified ( ) ) ; } catch ( Exception e ) { logger . error ( "ERROR ## change the canal Do to Model has an exception" ) ; throw new ManagerException ( e ) ; } return canal ;
public class OMVRBTreeEntryPersistent { /** * Delete all the nodes recursively . IF they are not loaded in memory , load all the tree . * @ throws IOException */ public OMVRBTreeEntryPersistent < K , V > delete ( ) throws IOException { } }
if ( dataProvider != null ) { pTree . removeNodeFromMemory ( this ) ; pTree . removeEntry ( dataProvider . getIdentity ( ) ) ; // EARLY LOAD LEFT AND DELETE IT RECURSIVELY if ( getLeft ( ) != null ) ( ( OMVRBTreeEntryPersistent < K , V > ) getLeft ( ) ) . delete ( ) ; // EARLY LOAD RIGHT AND DELETE IT RECURSIVELY if ( getRight ( ) != null ) ( ( OMVRBTreeEntryPersistent < K , V > ) getRight ( ) ) . delete ( ) ; // DELETE MYSELF dataProvider . removeIdentityChangedListener ( this ) ; dataProvider . delete ( ) ; clear ( ) ; } return this ;
public class EventLogQueue { /** * On insert . * @ param log * the log */ private void onInsert ( EventLog log ) { } }
if ( insertEvents == null ) { insertEvents = new ConcurrentHashMap < Object , EventLog > ( ) ; } insertEvents . put ( log . getEntityId ( ) , log ) ;
public class Presenter { /** * A convenience function creating a transformer that will repeat the source observable whenever it will complete * @ param < T > the type of the transformed observable * @ return transformer that will emit observable that will never complete ( source will be subscribed again ) */ @ NonNull private static < T > ObservableTransformer < T , T > repeatAfterCompleted ( ) { } }
return observable -> observable . repeatWhen ( completedNotification -> completedNotification ) ;
public class ExcelUtils { /** * 无模板 、 基于注解的数据导出 * @ param data 待导出数据 * @ param clazz { @ link com . github . crab2died . annotation . ExcelField } 映射对象Class * @ param isWriteHeader 是否写入表头 * @ param sheetName 指定导出Excel的sheet名称 * @ param isXSSF 导出的Excel是否为Excel2007及以上版本 ( 默认是 ) * @ param targetPath 生成的Excel输出全路径 * @ throws Excel4JException 异常 * @ throws IOException 异常 * @ author Crab2Died */ public void exportObjects2Excel ( List < ? > data , Class clazz , boolean isWriteHeader , String sheetName , boolean isXSSF , String targetPath ) throws Excel4JException , IOException { } }
try ( FileOutputStream fos = new FileOutputStream ( targetPath ) ; Workbook workbook = exportExcelNoTemplateHandler ( data , clazz , isWriteHeader , sheetName , isXSSF ) ) { workbook . write ( fos ) ; }
public class Graph { /** * Compute a reduce transformation over the edge values of each vertex . * For each vertex , the transformation consecutively calls a * { @ link ReduceEdgesFunction } until only a single value for each edge remains . * The { @ link ReduceEdgesFunction } combines two edge values into one new value of the same type . * @ param reduceEdgesFunction the reduce function to apply to the neighbors of each vertex . * @ param direction the edge direction ( in - , out - , all - ) * @ return a Dataset of Tuple2 , with one tuple per vertex . * The first field of the Tuple2 is the vertex ID and the second field * is the aggregate value computed by the provided { @ link ReduceEdgesFunction } . * @ throws IllegalArgumentException */ public DataSet < Tuple2 < K , EV > > reduceOnEdges ( ReduceEdgesFunction < EV > reduceEdgesFunction , EdgeDirection direction ) throws IllegalArgumentException { } }
switch ( direction ) { case IN : return edges . map ( new ProjectVertexWithEdgeValueMap < > ( 1 ) ) . withForwardedFields ( "f1->f0" ) . name ( "Vertex with in-edges" ) . groupBy ( 0 ) . reduce ( new ApplyReduceFunction < > ( reduceEdgesFunction ) ) . name ( "Reduce on edges" ) ; case OUT : return edges . map ( new ProjectVertexWithEdgeValueMap < > ( 0 ) ) . withForwardedFields ( "f0->f0" ) . name ( "Vertex with out-edges" ) . groupBy ( 0 ) . reduce ( new ApplyReduceFunction < > ( reduceEdgesFunction ) ) . name ( "Reduce on edges" ) ; case ALL : return edges . flatMap ( new EmitOneVertexWithEdgeValuePerNode < > ( ) ) . withForwardedFields ( "f2->f1" ) . name ( "Vertex with all edges" ) . groupBy ( 0 ) . reduce ( new ApplyReduceFunction < > ( reduceEdgesFunction ) ) . name ( "Reduce on edges" ) ; default : throw new IllegalArgumentException ( "Illegal edge direction" ) ; }
public class ConnectionParameters { /** * Sets the JSON credentials path . * @ param jsonCredentialsPath * the JSON credentials path . */ public void setJsonCredentialsFile ( String jsonCredentialsPath ) { } }
if ( ! Utility . isNullOrEmpty ( jsonCredentialsPath ) ) { setJsonCredentialsFile ( new File ( jsonCredentialsPath ) ) ; } else { setJsonCredentialsFile ( ( File ) null ) ; }
public class ResponseField { /** * Resolve field argument value by name . If argument represents a references to the variable , it will be resolved from * provided operation variables values . * @ param name argument name * @ param variables values of operation variables * @ return resolved argument value */ @ SuppressWarnings ( "unchecked" ) @ Nullable public Object resolveArgument ( @ NotNull String name , @ NotNull Operation . Variables variables ) { } }
checkNotNull ( name , "name == null" ) ; checkNotNull ( variables , "variables == null" ) ; Map < String , Object > variableValues = variables . valueMap ( ) ; Object argumentValue = arguments . get ( name ) ; if ( argumentValue instanceof Map ) { Map < String , Object > argumentValueMap = ( Map < String , Object > ) argumentValue ; if ( isArgumentValueVariableType ( argumentValueMap ) ) { String variableName = argumentValueMap . get ( VARIABLE_NAME_KEY ) . toString ( ) ; return variableValues . get ( variableName ) ; } else { return null ; } } return argumentValue ;
public class Resolver { /** * syck _ resolver _ tagurize */ @ JRubyMethod public static IRubyObject tagurize ( IRubyObject self , IRubyObject val ) { } }
IRubyObject tmp = val . checkStringType ( ) ; if ( ! tmp . isNil ( ) ) { String taguri = ImplicitScanner . typeIdToUri ( tmp . toString ( ) ) ; val = self . getRuntime ( ) . newString ( taguri ) ; } return val ;
public class CPDefinitionSpecificationOptionValuePersistenceImpl { /** * Returns the last cp definition specification option value in the ordered set where CPDefinitionId = & # 63 ; . * @ param CPDefinitionId the cp definition ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp definition specification option value * @ throws NoSuchCPDefinitionSpecificationOptionValueException if a matching cp definition specification option value could not be found */ @ Override public CPDefinitionSpecificationOptionValue findByCPDefinitionId_Last ( long CPDefinitionId , OrderByComparator < CPDefinitionSpecificationOptionValue > orderByComparator ) throws NoSuchCPDefinitionSpecificationOptionValueException { } }
CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue = fetchByCPDefinitionId_Last ( CPDefinitionId , orderByComparator ) ; if ( cpDefinitionSpecificationOptionValue != null ) { return cpDefinitionSpecificationOptionValue ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPDefinitionId=" ) ; msg . append ( CPDefinitionId ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionSpecificationOptionValueException ( msg . toString ( ) ) ;
public class Exceptions { /** * Returns a supplier which will unwrap and rethrow any throwables caught in { @ code supplier } . * @ param supplier the supplier * @ param < T > the output type * @ param < E > the exception type * @ return a supplier */ public static < T , E extends Throwable > @ NonNull Supplier < T > unwrappingRethrowSupplier ( final @ NonNull ThrowingSupplier < T , E > supplier ) { } }
return ( ) -> { try { return supplier . throwingGet ( ) ; } catch ( final Throwable t ) { throw rethrow ( unwrap ( t ) ) ; } } ;
public class SplitMergeLineFitSegment { /** * Recursively splits pixels . Used in the initial segmentation . Only split points between * the two ends are added */ protected void splitPixels ( int indexStart , int indexStop ) { } }
// too short to split if ( indexStart + 1 >= indexStop ) return ; int indexSplit = selectSplitBetween ( indexStart , indexStop ) ; if ( indexSplit >= 0 ) { splitPixels ( indexStart , indexSplit ) ; splits . add ( indexSplit ) ; splitPixels ( indexSplit , indexStop ) ; }
public class ListSuitesResult { /** * Information about the suites . * @ param suites * Information about the suites . */ public void setSuites ( java . util . Collection < Suite > suites ) { } }
if ( suites == null ) { this . suites = null ; return ; } this . suites = new java . util . ArrayList < Suite > ( suites ) ;
public class TokenMapperGeneric { /** * { @ inheritDoc } */ @ Override @ SuppressWarnings ( "unchecked" ) public void addFields ( Document document , DecoratedKey partitionKey ) { } }
ByteBuffer bb = factory . toByteArray ( partitionKey . getToken ( ) ) ; String serialized = ByteBufferUtils . toString ( bb ) ; Field field = new StringField ( FIELD_NAME , serialized , Store . YES ) ; document . add ( field ) ;
public class FileSystem {

    /**
     * Make the given filename absolute from the given root if it is not already absolute.
     *
     * <p>Behavior summary:
     * <ul>
     * <li>{@code null} filename &rarr; {@code null}, regardless of {@code current};</li>
     * <li>{@code jar:} URL &rarr; the embedded jar URL is itself made absolute against
     *     {@code current} and the inner entry is reattached;</li>
     * <li>relative {@code file:} URL with a non-null {@code current} &rarr; joined onto
     *     {@code current} (e.g. {@code file:path/to/file} + {@code http://host.com/myroot}
     *     &rarr; {@code http://host.com/myroot/path/to/file});</li>
     * <li>absolute {@code file:} URLs and any other scheme ({@code http:}, {@code ftp:}, ...)
     *     are returned unchanged.</li>
     * </ul>
     *
     * @param filename is the name to make absolute.
     * @param current is the current directory which permits to make absolute.
     * @return an absolute filename.
     */
    @Pure
    @SuppressWarnings("checkstyle:cyclomaticcomplexity")
    public static URL makeAbsolute(URL filename, URL current) {
        if (filename == null) {
            return null;
        }
        final URISchemeType scheme = URISchemeType.getSchemeType(filename);
        switch (scheme) {
        case JAR:
            // Recurse on the outer jar URL, then rebuild the jar: URL with the
            // original inner entry path.
            try {
                URL jarUrl = getJarURL(filename);
                jarUrl = makeAbsolute(jarUrl, current);
                final File jarFile = getJarFile(filename);
                return toJarURL(jarUrl, jarFile);
            } catch (MalformedURLException exception) {
                // Ignore error: fall through and return the filename unchanged.
            }
            break;
        case FILE:
            // Only relative file paths are resolved against 'current'.
            final File file = new File(filename.getFile());
            if (!file.isAbsolute() && current != null) {
                return join(current, file);
            }
            break;
        //$CASES-OMITTED$
        default:
            // do not change the URL for non-hierarchical / remote schemes
        }
        return filename;
    }
}
public class FileParameterValue {

    /**
     * Serve this file parameter in response to a {@link StaplerRequest}.
     *
     * <p>Responds only when the remainder of the request path is exactly
     * {@code /<originalFileName>}. Unless folder traversal is explicitly allowed,
     * the resolved file must live under the build's file-parameter folder,
     * guarding against path-traversal escapes.
     *
     * @param request the incoming Stapler request
     * @param response the response used to stream the file
     * @throws ServletException on servlet errors
     * @throws IOException on I/O errors (including invalid paths)
     */
    public void doDynamic(StaplerRequest request, StaplerResponse response) throws ServletException, IOException {
        // Only respond to a request for exactly "/<originalFileName>".
        if (("/" + originalFileName).equals(request.getRestOfPath())) {
            AbstractBuild build = (AbstractBuild) request.findAncestor(AbstractBuild.class).getObject();
            File fileParameter = getLocationUnderBuild(build);
            if (!ALLOW_FOLDER_TRAVERSAL_OUTSIDE_WORKSPACE) {
                File fileParameterFolder = getFileParameterFolderUnderBuild(build);
                // TODO can be replaced by Util#isDescendant in 2.80+
                // Normalize both paths and require the file to be a descendant of
                // the expected folder (path-traversal protection).
                Path child = fileParameter.getAbsoluteFile().toPath().normalize();
                Path parent = fileParameterFolder.getAbsoluteFile().toPath().normalize();
                if (!child.startsWith(parent)) {
                    throw new IllegalStateException("The fileParameter tried to escape the expected folder: " + location);
                }
            }
            if (fileParameter.isFile()) {
                try (InputStream data = Files.newInputStream(fileParameter.toPath())) {
                    long lastModified = fileParameter.lastModified();
                    long contentLength = fileParameter.length();
                    if (request.hasParameter("view")) {
                        // "view" forces inline display as plain text rather than download.
                        response.serveFile(request, data, lastModified, contentLength, "plain.txt");
                    } else {
                        response.serveFile(request, data, lastModified, contentLength, originalFileName);
                    }
                } catch (InvalidPathException e) {
                    throw new IOException(e);
                }
            }
        }
    }
}
public class TypeExtractor {

    /**
     * Infers cast types for each predicate in the bottom up order.
     */
    private ImmutableMap<Predicate, ImmutableList<TermType>> extractCastTypeMap(
            Multimap<Predicate, CQIE> ruleIndex, List<Predicate> predicatesInBottomUp,
            ImmutableMap<CQIE, ImmutableList<Optional<TermType>>> termTypeMap, DBMetadata metadata) {
        // Append-only accumulator: types inferred for lower predicates are
        // consulted while inferring the predicates above them.
        Map<Predicate, ImmutableList<TermType>> accumulator = Maps.newHashMap();
        for (Predicate predicate : predicatesInBottomUp) {
            accumulator.put(
                predicate,
                inferCastTypes(predicate, ruleIndex.get(predicate), termTypeMap, accumulator, metadata));
        }
        return ImmutableMap.copyOf(accumulator);
    }
}
public class Utils { /** * Creates a new instance of the given class . * @ param < T > the type parameter * @ param clazz the class object for which a new instance should be created . * @ return the new instance of class clazz . */ public static < T extends IDeepType > T newTypeInstance ( Class < T > clazz ) { } }
try { return clazz . newInstance ( ) ; } catch ( InstantiationException | IllegalAccessException e ) { throw new DeepGenericException ( e ) ; }
public class DrizzlePreparedStatement { /** * < p > Sets the value of the designated parameter with the given object . The second argument must be an object type ; * for integral values , the < code > java . lang < / code > equivalent objects should be used . * If the second argument is an < code > InputStream < / code > then the stream must contain the number of bytes specified * by scaleOrLength . If the second argument is a < code > Reader < / code > then the reader must contain the number of * characters specified by scaleOrLength . If these conditions are not true the driver will generate a * < code > SQLException < / code > when the prepared statement is executed . * < p > The given Java object will be converted to the given targetSqlType before being sent to the database . * If the object has a custom mapping ( is of a class implementing the interface < code > SQLData < / code > ) , the JDBC * driver should call the method < code > SQLData . writeSQL < / code > to write it to the SQL data stream . If , on the other * hand , the object is of a class implementing < code > Ref < / code > , < code > Blob < / code > , < code > Clob < / code > , * < code > NClob < / code > , < code > Struct < / code > , < code > java . net . URL < / code > , or < code > Array < / code > , the driver should pass * it to the database as a value of the corresponding SQL type . * < p > Note that this method may be used to pass database - specific abstract data types . * @ param parameterIndex the first parameter is 1 , the second is 2 , . . . * @ param x the object containing the input parameter value * @ param targetSqlType the SQL type ( as defined in java . sql . Types ) to be sent to the database . The scale argument * may further qualify this type . * @ param scaleOrLength for < code > java . sql . Types . DECIMAL < / code > or < code > java . sql . Types . NUMERIC types < / code > , this * is the number of digits after the decimal point . 
For Java Object types * < code > InputStream < / code > and < code > Reader < / code > , this is the length of the data in the * stream or reader . For all other types , this value will be ignored . * @ throws java . sql . SQLException if parameterIndex does not correspond to a parameter marker in the SQL statement ; * if a database access error occurs ; this method is called on a closed * < code > PreparedStatement < / code > or if the Java Object specified by x is an * InputStream or Reader object and the value of the scale parameter is less than * zero * @ throws java . sql . SQLFeatureNotSupportedException * if < code > targetSqlType < / code > is a < code > ARRAY < / code > , < code > BLOB < / code > , * < code > CLOB < / code > , < code > DATALINK < / code > , < code > JAVA _ OBJECT < / code > , * < code > NCHAR < / code > , < code > NCLOB < / code > , < code > NVARCHAR < / code > , * < code > LONGNVARCHAR < / code > , < code > REF < / code > , < code > ROWID < / code > , * < code > SQLXML < / code > or < code > STRUCT < / code > data type and the JDBC driver does not * support this data type * @ see java . sql . Types * @ since 1.6 */ public void setObject ( final int parameterIndex , final Object x , final int targetSqlType , final int scaleOrLength ) throws SQLException { } }
if ( x == null ) { setNull ( parameterIndex , targetSqlType ) ; return ; } switch ( targetSqlType ) { case Types . ARRAY : case Types . CLOB : case Types . DATALINK : case Types . NCHAR : case Types . NCLOB : case Types . NVARCHAR : case Types . LONGNVARCHAR : case Types . REF : case Types . ROWID : case Types . SQLXML : case Types . STRUCT : throw SQLExceptionMapper . getFeatureNotSupportedException ( "Datatype not supported" ) ; case Types . INTEGER : if ( x instanceof Number ) { setNumber ( parameterIndex , ( Number ) x ) ; } else { setInt ( parameterIndex , Integer . valueOf ( ( String ) x ) ) ; } } throw SQLExceptionMapper . getFeatureNotSupportedException ( "Method not yet implemented" ) ;
public class BplusTree {

    /**
     * Remove the Key from the tree (this function is iterative).
     *
     * @param key to delete
     * @return true if key was removed and false otherwise
     */
    protected boolean removeIterative(final K key) {
        // findLeafNode(key, true) descends to the leaf and (presumably) records
        // the visited internal nodes/slots on stackNodes/stackSlots -- confirm.
        final LeafNode<K, V> nodeLeaf = findLeafNode(key, true);
        // Find in leaf node for key and delete it
        final int slot = nodeLeaf.findSlotByKey(key);
        if (slot >= 0) { // found
            // Remove Key
            nodeLeaf.remove(slot);
            putNode(nodeLeaf);
            // Iterate back over nodes checking underflow
            while (!stackNodes.isEmpty()) {
                final InternalNode<K, V> node = stackNodes.pop();
                final int slotcheck = stackSlots.pop();
                // Stop climbing as soon as an ancestor reports no underflow;
                // NOTE(review): looks like checkUnderflow also performs the
                // rebalance side effect when it returns true -- confirm.
                if (!node.checkUnderflow(slotcheck)) {
                    return true;
                }
            }
            // Underflow propagated all the way to the root; removal still succeeded.
            return true;
        }
        // Key not present in the tree.
        return false;
    }
}
public class GatewayResponseMarshaller {

    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param gatewayResponse the gateway response to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives each field binding
     * @throws SdkClientException if the argument is null or any marshalling step fails
     */
    public void marshall(GatewayResponse gatewayResponse, ProtocolMarshaller protocolMarshaller) {
        if (gatewayResponse == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each model property to its wire location.
            protocolMarshaller.marshall(gatewayResponse.getResponseType(), RESPONSETYPE_BINDING);
            protocolMarshaller.marshall(gatewayResponse.getStatusCode(), STATUSCODE_BINDING);
            protocolMarshaller.marshall(gatewayResponse.getResponseParameters(), RESPONSEPARAMETERS_BINDING);
            protocolMarshaller.marshall(gatewayResponse.getResponseTemplates(), RESPONSETEMPLATES_BINDING);
            protocolMarshaller.marshall(gatewayResponse.getDefaultResponse(), DEFAULTRESPONSE_BINDING);
        } catch (Exception e) {
            // Wrap any failure in an SdkClientException, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class DefaultInstalledExtensionRepository { /** * Register a newly installed extension in backward dependencies map . * @ param localExtension the local extension to register * @ param namespace the namespace * @ param valid is the extension valid * @ return the new { @ link DefaultInstalledExtension } */ private DefaultInstalledExtension addInstalledExtension ( LocalExtension localExtension , String namespace , boolean valid ) { } }
DefaultInstalledExtension installedExtension = this . extensions . get ( localExtension . getId ( ) ) ; if ( installedExtension == null ) { installedExtension = new DefaultInstalledExtension ( localExtension , this ) ; } installedExtension . setInstalled ( true , namespace ) ; installedExtension . setValid ( namespace , valid ) ; addInstalledExtension ( installedExtension , namespace ) ; return installedExtension ;
public class FactoryFinder { /** * Returns the location where the given Class is loaded from . */ private static String which ( Class clazz ) { } }
try { String classnameAsResource = clazz . getName ( ) . replace ( '.' , '/' ) + ".class" ; ClassLoader loader = clazz . getClassLoader ( ) ; URL it ; if ( loader != null ) { it = loader . getResource ( classnameAsResource ) ; } else { it = ClassLoader . getSystemResource ( classnameAsResource ) ; } if ( it != null ) { return it . toString ( ) ; } } // The VM ran out of memory or there was some other serious problem . Re - throw . catch ( VirtualMachineError vme ) { throw vme ; } // ThreadDeath should always be re - thrown catch ( ThreadDeath td ) { throw td ; } catch ( Throwable t ) { // work defensively . if ( debug ) { t . printStackTrace ( ) ; } } return "unknown location" ;
public class AgentManager { /** * Change state if agent logs out . * @ param event */ void handleAgentCallbackLogoffEvent ( AgentCallbackLogoffEvent event ) { } }
AsteriskAgentImpl agent = getAgentByAgentId ( "Agent/" + event . getAgent ( ) ) ; if ( agent == null ) { logger . error ( "Ignored AgentCallbackLogoffEvent for unknown agent " + event . getAgent ( ) + ". Agents: " + agents . values ( ) . toString ( ) ) ; return ; } agent . updateState ( AgentState . AGENT_LOGGEDOFF ) ;
public class ObjectValidatorSupport { /** * ネストしたプロパティの値の検証を実行する 。 * @ param < S > ネストしたプロパティのクラスタイプ * @ param validator ネストしたプロパティに対するValidator * @ param targetObject ネストしたプロパティのインスタンス * @ param errors エラー情報 * @ param subPath ネストするパス * @ param groups バリデーション時のヒントとなるグループ 。 * @ throws IllegalArgumentException { @ literal validator = = null or targetObject = = null or errors = = null } */ protected < S > void invokeNestedValidator ( final ObjectValidator < S > validator , final S targetObject , final SheetBindingErrors < ? > errors , final String subPath , final Class < ? > ... groups ) { } }
ArgUtils . notNull ( validator , "validator" ) ; ArgUtils . notNull ( targetObject , "targetObject" ) ; ArgUtils . notNull ( errors , "errors" ) ; errors . pushNestedPath ( subPath ) ; try { validator . validate ( targetObject , errors , groups ) ; } finally { errors . popNestedPath ( ) ; }
public class TransactionRomanticSnapshotBuilder { protected void setupRequestPathExp ( StringBuilder sb , RomanticTransaction tx ) { } }
final String requestPath = tx . getRequestPath ( ) ; if ( requestPath != null ) { sb . append ( ", " ) . append ( requestPath ) ; }
public class Database { /** * Determines whether or not the database has a table named " do " . * @ return true if the database contains a table with the name " do " . * @ throws Exception */ protected boolean usesDOTable ( ) throws Exception { } }
Connection conn = getConnection ( ) ; DatabaseMetaData dmd = conn . getMetaData ( ) ; // check if we need to update old table ResultSet rs = dmd . getTables ( null , null , "do%" , null ) ; while ( rs . next ( ) ) { if ( rs . getString ( "TABLE_NAME" ) . equals ( "do" ) ) { rs . close ( ) ; return true ; } } rs . close ( ) ; return false ;
public class SoftDictionary {

    /**
     * Lookup a string in the dictionary, cache result in closeMatches.
     *
     * <p>If id == null, consider any match. If id is non-null, consider
     * only matches to strings that don't have the same id, or that have
     * a null id.
     */
    private void doLookup(String id, StringWrapper toFind) {
        // retrain if necessary
        if (distance == null) {
            distance = new MyTeacher().train(distanceLearner);
        }
        // used cached values if it's ok
        if (lastLookup == toFind)
            return;
        closeMatches = new HashSet();
        closestMatch = null;
        // Scores act as similarities (higher is better), so start at the minimum.
        distanceToClosestMatch = -Double.MAX_VALUE;
        // lookup best match to wrapper
        MyWrapper wrapper = asMyWrapper(toFind);
        Token[] tokens = wrapper.getTokens();
        for (int i = 0; i < tokens.length; i++) {
            ArrayList stringsWithToken = (ArrayList) index.get(tokens[i]);
            // Skip tokens shared by too large a fraction of entries -- they are
            // uninformative and would make the candidate set huge.
            if (stringsWithToken != null && ((double) stringsWithToken.size() / totalEntries) < maxFraction) {
                for (Iterator j = stringsWithToken.iterator(); j.hasNext();) {
                    MyWrapper wj = (MyWrapper) j.next();
                    String wjId = (String) idMap.get(wj);
                    //if (DEBUG) System.out.println("id: " + id + " wjId: " + wjId);
                    // Candidate must be new, and must not carry the same non-null id.
                    if (!closeMatches.contains(wj) && (wjId == null || !wjId.equals(id))) {
                        double score = distance.score(wrapper.getDistanceWrapper(), wj.getDistanceWrapper());
                        if (DEBUG)
                            System.out.println("score for " + wj + ": " + score);
                        //if (DEBUG) System.out.println(distance.explainScore(wrapper.getDistanceWrapper(), wj.getDistanceWrapper()));
                        closeMatches.add(wj);
                        // '>=' means the most recently seen of equally-scored matches wins.
                        if (score >= distanceToClosestMatch) {
                            //if (DEBUG) System.out.println("closest so far");
                            distanceToClosestMatch = score;
                            closestMatch = wj;
                        }
                    }
                }
            }
        }
        // Remember the query so an immediate repeat lookup reuses the cached results.
        lastLookup = toFind;
    }
}
public class DialogUtils { /** * Checks if a dialog is open . * @ return true if dialog is open */ private boolean isDialogOpen ( ) { } }
final Activity activity = activityUtils . getCurrentActivity ( false ) ; final View [ ] views = viewFetcher . getWindowDecorViews ( ) ; View view = viewFetcher . getRecentDecorView ( views ) ; if ( ! isDialog ( activity , view ) ) { for ( View v : views ) { if ( isDialog ( activity , v ) ) { return true ; } } } else { return true ; } return false ;
public class ThreadPoolTaskScheduler { /** * { @ inheritDoc } * @ see org . audit4j . core . schedule . TaskScheduler # schedule ( java . lang . Runnable , org . audit4j . core . schedule . Trigger ) */ @ Override public ScheduledFuture < ? > schedule ( Runnable task , Trigger trigger ) { } }
ScheduledExecutorService executor = getScheduledExecutor ( ) ; try { ErrorHandler errorHandlerLocal = this . errorHandler != null ? this . errorHandler : TaskUtils . getDefaultErrorHandler ( true ) ; return new ReschedulingRunnable ( task , trigger , executor , errorHandlerLocal ) . schedule ( ) ; } catch ( RejectedExecutionException ex ) { throw new TaskRejectedException ( "Executor [" + executor + "] did not accept task: " + task , ex ) ; }
public class Fetch { /** * Iterates over all fields in this fetch . The key is the field name , the value is the { @ link * Fetch } for that field , or null if no Fetch is provided for that field . * @ return { @ link Iterator } over all { @ link Entry } s in this fetch . */ @ Override public Iterator < Entry < String , Fetch > > iterator ( ) { } }
return Collections . unmodifiableMap ( attrFetchMap ) . entrySet ( ) . iterator ( ) ;
public class ContentSpecProcessor { /** * Checks to see if a ContentSpec comment matches a Content Spec Entity comment . * @ param comment The ContentSpec comment object . * @ param node The Content Spec Entity comment . * @ param matchContent If the contents of the comment have to match to a reasonable extent . * @ return True if the comment is determined to match otherwise false . */ protected boolean doesCommentMatch ( final Comment comment , final CSNodeWrapper node , boolean matchContent ) { } }
if ( ! node . getNodeType ( ) . equals ( CommonConstants . CS_NODE_COMMENT ) ) return false ; // If the unique id is not from the parser , in which case it will start with a number than use the unique id to compare if ( comment . getUniqueId ( ) != null && comment . getUniqueId ( ) . matches ( "^\\d.*" ) ) { return comment . getUniqueId ( ) . equals ( Integer . toString ( node . getId ( ) ) ) ; } else if ( matchContent ) { return StringUtilities . similarDamerauLevenshtein ( comment . getText ( ) , node . getTitle ( ) ) >= ProcessorConstants . MIN_MATCH_SIMILARITY ; } else { // Check the parent has the same name if ( comment . getParent ( ) != null ) { if ( comment . getParent ( ) instanceof ContentSpec ) { return node . getParent ( ) == null ; } else if ( comment . getParent ( ) instanceof Level && node . getParent ( ) != null ) { final Level parent = ( ( Level ) comment . getParent ( ) ) ; return parent . getTitle ( ) . equals ( node . getParent ( ) . getTitle ( ) ) ; } else { return false ; } } return true ; }
public class AbstractFacade {

    /**
     * {@inheritDoc}
     *
     * <p>Tears down every component registered under the given key: stops its wave
     * listening, detaches its key and facade links, then removes the map entry.
     * The whole operation is serialized on {@code componentMap}.
     */
    @Override
    public <E extends R> void unregister(final UniqueKey<E> uniqueKey) {
        synchronized (this.componentMap) {
            // Try to grab the object we want to unregister
            final List<E> readyObjectList = getReadyObjectList(uniqueKey);
            for (final E readyObject : readyObjectList) {
                // Unlisten all previously listened WaveType
                if (readyObject instanceof Component<?>) {
                    try {
                        globalFacade().notifier().unlistenAll((Component<?>) readyObject);
                    } catch (final JRebirthThreadException e) {
                        // Log and continue: a failed unlisten must not block the teardown
                        // of the remaining components.
                        LOGGER.error(UNLISTEN_ALL_ERROR, readyObject.getClass().getSimpleName(), e);
                    }
                }
                // Release the key
                readyObject.key(null);
                // Release the facade link
                readyObject.localFacade(null);
            }
            // Remove the component from the singleton map
            this.componentMap.remove(uniqueKey);
        }
    }
}
public class ASN1 {

    /**
     * Decode an array of bytes which is supposed to be an ASN.1 encoded structure.
     * This code does the decoding w/o any reference to a schema for what is being
     * decoded so it returns type and value pairs rather than converting the values
     * to the correct underlying data type.
     *
     * <p>One oddity that needs to be observed is that Object Identifiers (tag 6) do
     * not have the type and length removed from them. This is because we do a byte
     * wise comparison and started doing the entire item rather than just the value
     * portion.
     *
     * <p>M00BUG - we should check that we don't overflow during the decoding process.
     *
     * @param offset starting offset in array to begin decoding
     * @param encoding bytes of the ASN.1 encoded value
     * @return Decoded structure
     * @throws CoseException ASN.1 encoding errors
     */
    public static TagValue DecodeCompound(int offset, byte[] encoding) throws CoseException {
        ArrayList<TagValue> result = new ArrayList<TagValue>();
        int retTag = encoding[offset];
        // We only decode objects which are compound objects. That means that this bit must be set
        if ((encoding[offset] & 0x20) != 0x20)
            throw new CoseException("Invalid structure");
        // l[0] = number of length octets, l[1] = content length -- assumed from the
        // usage below; confirm against DecodeLength.
        int[] l = DecodeLength(offset + 1, encoding);
        int sequenceLength = l[1];
        if (offset + sequenceLength > encoding.length)
            throw new CoseException("Invalid sequence");
        // Skip the tag byte plus the length octets to reach the first child element.
        offset += l[0] + 1;
        while (sequenceLength > 0) {
            int tag = encoding[offset];
            l = DecodeLength(offset + 1, encoding);
            if (l[1] > sequenceLength)
                throw new CoseException("Invalid sequence");
            if ((tag & 0x20) != 0) {
                // Constructed child: recurse from its own tag byte.
                result.add(DecodeCompound(offset, encoding));
                offset += 1 + l[0] + l[1];
                sequenceLength -= 1 + l[0] + l[1];
            } else {
                // At some point we might want to fix this.
                if (tag == 6) {
                    // OID: retain tag + length + value for byte-wise comparisons.
                    result.add(new TagValue(tag, Arrays.copyOfRange(encoding, offset, offset + l[1] + l[0] + 1)));
                } else {
                    // Other primitives: store only the value portion.
                    result.add(new TagValue(tag, Arrays.copyOfRange(encoding, offset + l[0] + 1, offset + 1 + l[0] + l[1])));
                }
                offset += 1 + l[0] + l[1];
                sequenceLength -= 1 + l[0] + l[1];
            }
        }
        return new TagValue(retTag, result);
    }
}
public class PeepholeMinimizeConditions {

    /**
     * Try flipping HOOKs (ternaries) that have negated conditions, e.g. turning
     * {@code !c ? a : b} into {@code c ? b : a}.
     *
     * <p>Returns the replacement for n or the original if no replacement was
     * necessary; the node is mutated in place either way.
     */
    private Node tryMinimizeHook(Node n) {
        Node originalCond = n.getFirstChild();
        MinimizedCondition minCond = MinimizedCondition.fromConditionNode(originalCond);
        MeasuredNode mNode = minCond.getMinimized(MinimizationStyle.ALLOW_LEADING_NOT);
        if (mNode.isNot()) {
            // Swap the HOOK: install the condition without its leading NOT, then
            // move the then-branch to the back so the two branches exchange places.
            Node thenBranch = n.getSecondChild();
            replaceNode(originalCond, mNode.withoutNot());
            n.removeChild(thenBranch);
            n.addChildToBack(thenBranch);
            reportChangeToEnclosingScope(n);
        } else {
            // No leading NOT: just substitute the minimized condition.
            replaceNode(originalCond, mNode);
        }
        return n;
    }
}
public class PooledBufferedOutputStream { /** * Writes the complete contents of this byte array output stream to * the specified output stream argument , as if by calling the output * stream ' s write method using < code > out . write ( buf , 0 , count ) < / code > . * @ param out the output stream to which to write the data . * @ exception IOException if an I / O error occurs . */ public synchronized void writeTo ( final OutputStream out ) throws IOException { } }
for ( BufferPool . Buffer b : buffers ) { out . write ( b . buf , 0 , b . pos ) ; }
public class TraceEventHelper { /** * Get status * @ param input The input * @ param ignoreDelist Should DELIST be ignored * @ param ignoreTracking Should TRACKING be ignored * @ param ignoreIncomplete Ignore incomplete traces * @ return The overall result */ public static Map < String , TraceEventStatus > getStatus ( Map < String , List < TraceEvent > > input , boolean ignoreDelist , boolean ignoreTracking , boolean ignoreIncomplete ) { } }
Map < String , TraceEventStatus > result = new TreeMap < String , TraceEventStatus > ( ) ; Iterator < Map . Entry < String , List < TraceEvent > > > it = input . entrySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { Map . Entry < String , List < TraceEvent > > entry = it . next ( ) ; result . put ( entry . getKey ( ) , getStatus ( entry . getValue ( ) , ignoreDelist , ignoreTracking , ignoreIncomplete ) ) ; } return result ;
public class TwoInputUdfOperator {

    /**
     * Adds semantic information about forwarded fields of the second input of the
     * user-defined function. Forwarded fields are never modified by the function and
     * are copied unchanged to the output, either at the same position ({@code "f2"})
     * or to another position ({@code "f0->f2"}). Multiple expressions can be given in
     * one string separated by ';' or as separate strings.
     *
     * <p>It is not possible to override forwarded-field information of the second
     * input that was already added by a
     * {@link org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSecond}
     * class annotation.
     *
     * <p><b>NOTE: Adding semantic information for functions is optional!</b>
     * Incorrect semantic information can cause the optimizer to generate incorrect
     * execution plans which compute wrong results, so be careful when adding it.
     *
     * @param forwardedFieldsSecond A list of forwarded field expressions for the second input of the function.
     * @return This operator with annotated forwarded field information.
     * @see org.apache.flink.api.java.functions.FunctionAnnotation
     * @see org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSecond
     */
    @SuppressWarnings("unchecked")
    public O withForwardedFieldsSecond(String... forwardedFieldsSecond) {
        if (this.udfSemantics == null || this.analyzedUdfSemantics) {
            // extract semantic properties from function annotations
            setSemanticProperties(extractSemanticAnnotationsFromUdf(getFunction().getClass()));
        }
        // Re-check deliberately: annotation extraction above may have produced null,
        // or the current semantics may stem from code analysis and can be replaced.
        if (this.udfSemantics == null || this.analyzedUdfSemantics) {
            setSemanticProperties(new DualInputSemanticProperties());
            SemanticPropUtil.getSemanticPropsDualFromString(this.udfSemantics, null, forwardedFieldsSecond,
                null, null, null, null, getInput1Type(), getInput2Type(), getResultType());
        } else {
            if (udfWithForwardedFieldsSecondAnnotation(getFunction().getClass())) {
                // refuse semantic information as it would override the function annotation
                throw new SemanticProperties.InvalidSemanticAnnotationException("Forwarded field information "
                    + "has already been added by a function annotation for the second input of this operator. "
                    + "Cannot overwrite function annotations.");
            } else {
                SemanticPropUtil.getSemanticPropsDualFromString(this.udfSemantics, null, forwardedFieldsSecond,
                    null, null, null, null, getInput1Type(), getInput2Type(), getResultType());
            }
        }
        O returnType = (O) this;
        return returnType;
    }
}
public class FctBnEntitiesProcessors { /** * < p > Get PrcCsvColumnCreate ( create and put into map ) . < / p > * @ return requested PrcCsvColumnCreate * @ throws Exception - an exception */ protected final PrcCsvColumnCreate < RS > createPutPrcCsvColumnCreate ( ) throws Exception { } }
PrcCsvColumnCreate < RS > proc = new PrcCsvColumnCreate < RS > ( ) ; @ SuppressWarnings ( "unchecked" ) PrcEntityCreate < RS , CsvColumn , Long > procDlg = ( PrcEntityCreate < RS , CsvColumn , Long > ) lazyGet ( null , PrcEntityCreate . class . getSimpleName ( ) ) ; proc . setPrcEntityCreate ( procDlg ) ; // assigning fully initialized object : this . processorsMap . put ( PrcCsvColumnCreate . class . getSimpleName ( ) , proc ) ; return proc ;
public class ReconstructionDataSetIterator { /** * Like the standard next method but allows a * customizable number of examples returned * @ param num the number of examples * @ return the next data applyTransformToDestination */ @ Override public DataSet next ( int num ) { } }
DataSet ret = iter . next ( num ) ; ret . setLabels ( ret . getFeatures ( ) ) ; return ret ;
public class RecurlyClient { /** * Pause a subscription or cancel a scheduled pause on a subscription . * * For an active subscription without a pause scheduled already , this will * schedule a pause period to begin at the next renewal date for the specified * number of billing cycles ( remaining _ pause _ cycles ) . * * When a scheduled pause already exists , this will update the remaining pause * cycles with the new value sent . When zero ( 0 ) remaining _ pause _ cycles is sent * for a subscription with a scheduled pause , the pause will be canceled . * * For a paused subscription , the remaining _ pause _ cycles will adjust the * length of the current pause period . Sending zero ( 0 ) in the remaining _ pause _ cycles * field will cause the subscription to be resumed at the next renewal date . * @ param subscriptionUuid The uuid for the subscription you wish to pause . * @ param remainingPauseCycles The number of billing cycles that the subscription will be paused . * @ return Subscription */ public Subscription pauseSubscription ( final String subscriptionUuid , final int remainingPauseCycles ) { } }
Subscription request = new Subscription ( ) ; request . setRemainingPauseCycles ( remainingPauseCycles ) ; return doPUT ( Subscription . SUBSCRIPTION_RESOURCE + "/" + subscriptionUuid + "/pause" , request , Subscription . class ) ;
/**
 * Suspends the connection for up to 60 seconds and returns a {@link Broadcastable}
 * wrapping the shared broadcaster, so the suspended client receives broadcast events
 * until the timeout elapses.
 *
 * @return a {@link Broadcastable} used to broadcast events
 */
@GET
@Suspend(period = 60, timeUnit = TimeUnit.SECONDS, listeners = { EventsLogger.class })
@Path("timeout")
public Broadcastable timeout() {
    return new Broadcastable(broadcaster);
}
public class ClassUtil {
    /**
     * Locates the first public method of {@code clazz} whose name equals the
     * supplied name (as returned by {@link Class#getMethods()}).
     *
     * @param clazz the class whose public methods are searched
     * @param name the method name to look for
     * @return the first method with the specified name, or {@code null} if none matches
     */
    public static Method findMethod(Class<?> clazz, String name) {
        for (Method candidate : clazz.getMethods()) {
            if (candidate.getName().equals(name)) {
                return candidate;
            }
        }
        return null;
    }
}
public class ProjectResolver { /** * Return the { @ link MavenProject } which is the base module for scanning * and analysis . * The base module is by searching with the module tree starting from the current module over its parents until a module is found containing a * directory " jqassistant " or no parent can be determined . * @ param module The current module . * @ param rulesDirectory The name of the directory used for identifying the root module . * @ param useExecutionRootAsProjectRoot ` true ` if the execution root shall be used as project root . * @ return The { @ link MavenProject } containing a rules directory . * @ throws MojoExecutionException If the directory cannot be resolved . */ static MavenProject getRootModule ( MavenProject module , List < MavenProject > reactor , String rulesDirectory , boolean useExecutionRootAsProjectRoot ) throws MojoExecutionException { } }
String rootModuleContextKey = ProjectResolver . class . getName ( ) + "#rootModule" ; MavenProject rootModule = ( MavenProject ) module . getContextValue ( rootModuleContextKey ) ; if ( rootModule == null ) { if ( useExecutionRootAsProjectRoot ) { rootModule = getRootModule ( reactor ) ; } else { rootModule = getRootModule ( module , rulesDirectory ) ; } module . setContextValue ( rootModuleContextKey , rootModule ) ; } return rootModule ;
public class Base64 { /** * Encodes binary data using the base64 algorithm , optionally chunking the output into 76 character blocks . * @ param binaryData * Array containing binary data to encode . * @ param isChunked * if { @ code true } this encoder will chunk the base64 output into 76 character blocks * @ param urlSafe * if { @ code true } this encoder will emit - and _ instead of the usual + and / characters . * < b > Note : no padding is added when encoding using the URL - safe alphabet . < / b > * @ return Base64 - encoded data . * @ throws IllegalArgumentException * Thrown when the input array needs an output array bigger than { @ link Integer # MAX _ VALUE } * @ since 1.4 */ public static byte [ ] encodeBase64 ( final byte [ ] binaryData , final boolean isChunked , final boolean urlSafe ) { } }
return encodeBase64 ( binaryData , isChunked , urlSafe , Integer . MAX_VALUE ) ;
public class WeightInitUtil { /** * Reshape the parameters view , without modifying the paramsView array values . * @ param shape Shape to reshape * @ param paramsView Parameters array view * @ param flatteningOrder Order in which parameters are flattened / reshaped */ public static INDArray reshapeWeights ( long [ ] shape , INDArray paramsView , char flatteningOrder ) { } }
return paramsView . reshape ( flatteningOrder , shape ) ;
public class HtmlWriter { /** * Checks if a given font is the same as the font that was last used . * @ param font the font of an object * @ return true if the font differs */ public boolean isOtherFont ( Font font ) { } }
try { Font cFont = ( Font ) currentfont . peek ( ) ; if ( cFont . compareTo ( font ) == 0 ) return false ; return true ; } catch ( EmptyStackException ese ) { if ( standardfont . compareTo ( font ) == 0 ) return false ; return true ; }
/**
 * Reads the "newPassword" action argument and stores it as the server's default
 * password, then persists the configuration.
 *
 * @throws PageException if reading the argument, updating the password, or storing fails
 */
private void doUpdateDefaultPassword() throws PageException {
    try {
        admin.updateDefaultPassword(getString("admin", action, "newPassword"));
    } catch (Exception e) {
        // Surface any failure as a PageException for the calling page context.
        throw Caster.toPageException(e);
    }
    // Persist the configuration change.
    store();
}
public class KeyStores { /** * Load keystore from InputStream , close the stream after load succeed or failed . */ public static KeyStore load ( InputStream in , char [ ] password ) { } }
try { KeyStore myTrustStore = KeyStore . getInstance ( KeyStore . getDefaultType ( ) ) ; myTrustStore . load ( in , password ) ; return myTrustStore ; } catch ( CertificateException | NoSuchAlgorithmException | KeyStoreException | IOException e ) { throw new TrustManagerLoadFailedException ( e ) ; } finally { Closeables . closeQuietly ( in ) ; }
/**
 * Adds a model to the in-memory list and, if requested, appends it to the backing
 * disk store. IO failures while writing are logged but do not fail the add.
 *
 * @param m the model to add
 * @param recordOnDisk whether to also append the model to the backing disk store
 * @return {@code true} if the model was added to the list
 */
protected boolean add(GraphicalModel m, boolean recordOnDisk) {
    boolean success = super.add(m);
    if (!success) return false;
    if (recordOnDisk) {
        try {
            if (writeWithFactors) {
                // Attempt to record this to the backing log file, with the factors
                writeExample(m);
            } else {
                // Attempt to record this to the backing log file, without the factors.
                // NOTE(review): the factor set is swapped out and restored around the write;
                // this is not safe if `m` is concurrently read by another thread — confirm.
                Set<GraphicalModel.Factor> cachedFactors = m.factors;
                m.factors = new HashSet<>();
                writeExample(m);
                m.factors = cachedFactors;
            }
        } catch (IOException e) {
            // Best effort: a failed disk write does not undo the in-memory add.
            e.printStackTrace();
        }
    }
    return true;
}
public class Range { /** * Converts the range to a JavaScript number array . * @ return a JavaScript number array */ public JsArrayNumber toJsArray ( ) { } }
JsArrayNumber array = JavaScriptObject . createArray ( ) . cast ( ) ; array . push ( minValue ) ; array . push ( maxValue ) ; return array ;
public class ZkClient { /** * Gets the acl on path * @ param path * @ return an entry instance with key = list of acls on node and value = stats . * @ throws ZkException * if any ZooKeeper exception occurred * @ throws RuntimeException * if any other exception occurs */ public Map . Entry < List < ACL > , Stat > getAcl ( final String path ) throws ZkException { } }
if ( path == null ) { throw new NullPointerException ( "Missing value for path" ) ; } if ( ! exists ( path ) ) { throw new RuntimeException ( "trying to get acls on non existing node " + path ) ; } return retryUntilConnected ( new Callable < Map . Entry < List < ACL > , Stat > > ( ) { @ Override public Map . Entry < List < ACL > , Stat > call ( ) throws Exception { return _connection . getAcl ( path ) ; } } ) ;
public class SnapshotVerifier { /** * Perform snapshot verification . * @ param directories list of directories to search for snapshots * @ param snapshotNames set of snapshot names / nonces to verify */ public static void verifySnapshots ( final List < String > directories , final Set < String > snapshotNames ) { } }
FileFilter filter = new SnapshotFilter ( ) ; if ( ! snapshotNames . isEmpty ( ) ) { filter = new SpecificSnapshotFilter ( snapshotNames ) ; } Map < String , Snapshot > snapshots = new HashMap < String , Snapshot > ( ) ; for ( String directory : directories ) { SnapshotUtil . retrieveSnapshotFiles ( new File ( directory ) , snapshots , filter , true , SnapshotPathType . SNAP_PATH , CONSOLE_LOG ) ; } if ( snapshots . isEmpty ( ) ) { System . out . println ( "Snapshot corrupted" ) ; System . out . println ( "No files found" ) ; } for ( Snapshot s : snapshots . values ( ) ) { System . out . println ( SnapshotUtil . generateSnapshotReport ( s . getTxnId ( ) , s ) . getSecond ( ) ) ; }
/**
 * Builds an access-log style prefix for the request (client address, principal,
 * method, URI, protocol), times the downstream processing, and logs the completed
 * request — either immediately or, for async requests, when async processing finishes.
 *
 * @param request the incoming request (assumed to be HTTP)
 * @param response the outgoing response
 * @param chain the filter chain to continue processing
 * @throws IOException if downstream processing fails with an I/O error
 * @throws ServletException if downstream processing fails
 */
@Override
public void doFilter(final ServletRequest request, final ServletResponse response, final FilterChain chain)
        throws IOException, ServletException {
    // It's quite safe to assume that we only receive HTTP requests
    final HttpServletRequest httpRequest = (HttpServletRequest) request;
    final HttpServletResponse httpResponse = (HttpServletResponse) response;
    final StringBuilder buf = new StringBuilder(256);
    // Prefer X-Forwarded-For so the real client is logged when behind a proxy.
    final Optional<String> address = Optional.ofNullable(httpRequest.getHeader(HttpHeaders.X_FORWARDED_FOR));
    final String clientAddress = address.orElse(request.getRemoteAddr());
    buf.append(clientAddress);
    buf.append(" - ");
    final String authType = httpRequest.getAuthType();
    if (authType != null) {
        buf.append(httpRequest.getUserPrincipal().getName());
    } else {
        // Unauthenticated request: log "-" in the principal slot.
        buf.append("-");
    }
    buf.append(" \"");
    buf.append(httpRequest.getMethod());
    buf.append(' ');
    buf.append(httpRequest.getRequestURI());
    buf.append(' ');
    buf.append(request.getProtocol());
    buf.append("\" ");
    // Wrap the response so the number of bytes sent to the client can be counted.
    final CountingHttpServletResponseWrapper responseWrapper = new CountingHttpServletResponseWrapper(httpResponse);
    final Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();
    try {
        chain.doFilter(request, responseWrapper);
    } finally {
        if (request.isAsyncStarted()) {
            // Async request: defer logging until async processing completes.
            final AsyncListener listener = new LoggingAsyncListener(buf, stopwatch, authType, clientAddress,
                    httpRequest, responseWrapper);
            request.getAsyncContext().addListener(listener);
        } else {
            logRequest(buf, stopwatch, authType, clientAddress, httpRequest, responseWrapper);
        }
    }
}
public class CodingAnnotationStudy { /** * Category - > # */ public static Map < Object , Integer > countTotalAnnotationsPerCategory ( final ICodingAnnotationStudy study ) { } }
Map < Object , Integer > result = new HashMap < Object , Integer > ( ) ; for ( ICodingAnnotationItem item : study . getItems ( ) ) { if ( item . getRaterCount ( ) <= 1 ) { continue ; } for ( IAnnotationUnit unit : item . getUnits ( ) ) { Object category = unit . getCategory ( ) ; if ( category == null ) { continue ; } Integer count = result . get ( category ) ; if ( count == null ) { result . put ( category , 1 ) ; } else { result . put ( category , count + 1 ) ; } } } return result ;
/**
 * Initialises the given {@link Cipher} in encrypt or decrypt mode with the
 * supplied key and initialisation vector, translating key/parameter failures
 * into actionable {@link IllegalArgumentException}s.
 *
 * @param cipher the cipher instance to initialise
 * @param mode one of {@link Cipher#ENCRYPT_MODE} or {@link Cipher#DECRYPT_MODE}
 * @param key the {@link SecretKey} to use
 * @param iv the initialisation vector
 * @throws IllegalArgumentException if the key or IV is rejected by the cipher
 */
private void initCipher(Cipher cipher, int mode, SecretKey key, byte[] iv) {
    // Initialise the cipher:
    IvParameterSpec ivParameterSpec = new IvParameterSpec(iv);
    try {
        cipher.init(mode, key, ivParameterSpec);
    } catch (InvalidKeyException e) {
        // This is likely to be an invalid key size, so explain what just happened and signpost how to fix it.
        String message;
        if (StringUtils.containsIgnoreCase(e.getMessage(), "illegal key size")) {
            message = "It looks like your JVM doesn't allow you to use strong 256-bit AES keys. " + "You can ";
        } else {
            message = "Invalid key for " + CIPHER_NAME + ". NB: If the root cause of this exception is an Illegal key size, " + "you can ";
        }
        throw new IllegalArgumentException(message + "either use Keys.useStandardKeys() to limit key size to 128-bits, or install the " + "'Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files' " + "in your JVM to use 256-bit keys.", e);
    } catch (InvalidAlgorithmParameterException e) {
        // IV was rejected (e.g. wrong length for the cipher's block size).
        throw new IllegalArgumentException("Invalid parameter passed to initialise cipher for encryption: zero IvParameterSpec containing " + cipher.getBlockSize() + " bytes.", e);
    }
}
public class InternalNode { /** * Return an edge to match with input value * @ param value * @ return * @ throws nl . uva . sne . midd . UnmatchedException */ public AbstractEdge < T > match ( T value ) throws UnmatchedException , MIDDException { } }
for ( AbstractEdge < T > e : this . edges ) { if ( e . match ( value ) ) { return e ; } } throw new UnmatchedException ( "No matching edge found for value " + value ) ;
public class QuickStartSecurityRegistry { /** * { @ inheritDoc } */ @ Override public List < String > getGroupsForUser ( String userSecurityName ) throws EntryNotFoundException , RegistryException { } }
if ( userSecurityName == null ) { throw new IllegalArgumentException ( "userSecurityName is null" ) ; } if ( userSecurityName . isEmpty ( ) ) { throw new IllegalArgumentException ( "uniqueGroupId is an empty String" ) ; } if ( user . equals ( userSecurityName ) ) { return new ArrayList < String > ( ) ; } else { throw new EntryNotFoundException ( REALM_NAME + " does not support groups" ) ; }
/**
 * syck_parser_read: refills the parser's buffer from the underlying IO source
 * (string or file), preserving any unconsumed tokens before the read.
 *
 * @return the number of bytes read into the buffer
 * @throws java.io.IOException if the underlying source cannot be read
 */
public int read() throws java.io.IOException {
    int len = 0;
    int skip = 0;
    switch (io_type) {
        case Str:
            // Shift unconsumed tokens to the buffer start; new data is read after them.
            skip = moveTokens();
            len = ((JechtIO.Str) io).read.read(buffer, ((JechtIO.Str) io), YAML.BUFFERSIZE - 1, skip);
            break;
        case File:
            skip = moveTokens();
            len = ((JechtIO.File) io).read.read(buffer, ((JechtIO.File) io), YAML.BUFFERSIZE - 1, skip);
            break;
        default:
            // Unknown IO type: nothing is read, len stays 0.
            break;
    }
    checkLimit(len);
    return len;
}
public class Utils {
    /**
     * Validates a condition, throwing a RuntimeException if the condition is violated.
     *
     * @param condition the condition that must hold
     * @param message the exception message, with format variables as defined by
     *        {@link String#format(String, Object...)}
     * @param values the format arguments
     */
    static void require(boolean condition, String message, Object... values) {
        if (condition) {
            return;
        }
        throw new RuntimeException(String.format(message, values));
    }
}
public class FlacFile { /** * Opens the given file for reading */ public static FlacFile open ( File f ) throws IOException , FileNotFoundException { } }
// Open , in a way that we can skip backwards a few bytes InputStream inp = new BufferedInputStream ( new FileInputStream ( f ) , 8 ) ; FlacFile file = open ( inp ) ; return file ;
public class HungarianNotation {
    /**
     * Strips Hungarian notation ("mFieldName" style) from a field name.
     * A name is considered Hungarian when it starts with 'm' followed by an
     * uppercase letter; the 'm' is dropped and the following letter lowercased.
     * Any other name is returned unchanged.
     *
     * @param name the field name
     * @return the field name without Hungarian notation
     */
    public static String removeNotation(String name) {
        // One pattern suffices: "m[A-Z].*" also covers the exact two-character
        // case ("mX") that was previously handled by a separate, redundant branch.
        if (name.matches("m[A-Z].*")) {
            return name.substring(1, 2).toLowerCase() + name.substring(2);
        }
        return name;
    }
}
/**
 * Appends a value to the attachment list stored under the given key, creating
 * the typed list lazily on first use. A null key is silently ignored.
 * Synchronized: list creation and append are atomic with respect to other
 * attachment operations on this instance.
 *
 * @param key the list attachment key (expected to be a ListAttachmentKey)
 * @param value the value to append
 */
public synchronized <T> void addToAttachmentList(final AttachmentKey<AttachmentList<T>> key, final T value) {
    if (key != null) {
        final Map<AttachmentKey<?>, Object> attachments = this.attachments;
        final AttachmentList<T> list = key.cast(attachments.get(key));
        if (list == null) {
            // First value for this key: create the element-typed list, register it, then append.
            final AttachmentList<T> newList = new AttachmentList<T>(((ListAttachmentKey<T>) key).getValueClass());
            attachments.put(key, newList);
            newList.add(value);
        } else {
            list.add(value);
        }
    }
}
public class ToArrays { /** * Returns a method that can be used with { @ link solid . stream . Stream # collect ( Func1 ) } * to convert an iterable stream of { @ link Number } type into a primitive short [ ] array . * @ return a method that converts an iterable stream of { @ link Number } type into a primitive short [ ] array . */ public static Func1 < Iterable < Short > , short [ ] > toShorts ( ) { } }
return new Func1 < Iterable < Short > , short [ ] > ( ) { @ Override public short [ ] call ( Iterable < Short > value ) { return new QuickNumberArray ( value ) . toShorts ( ) ; } } ;
/**
 * Generated EMF reflective accessor: returns the value of the feature with the
 * given id, delegating unknown features to the superclass.
 * Do not hand-edit the dispatch logic — it mirrors the model definition.
 *
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
        case XbasePackage.XCONSTRUCTOR_CALL__CONSTRUCTOR:
            // Resolve proxies only when requested by the caller.
            if (resolve) return getConstructor();
            return basicGetConstructor();
        case XbasePackage.XCONSTRUCTOR_CALL__ARGUMENTS:
            return getArguments();
        case XbasePackage.XCONSTRUCTOR_CALL__TYPE_ARGUMENTS:
            return getTypeArguments();
        case XbasePackage.XCONSTRUCTOR_CALL__INVALID_FEATURE_ISSUE_CODE:
            return getInvalidFeatureIssueCode();
        case XbasePackage.XCONSTRUCTOR_CALL__VALID_FEATURE:
            return isValidFeature();
        case XbasePackage.XCONSTRUCTOR_CALL__EXPLICIT_CONSTRUCTOR_CALL:
            return isExplicitConstructorCall();
        case XbasePackage.XCONSTRUCTOR_CALL__ANONYMOUS_CLASS_CONSTRUCTOR_CALL:
            return isAnonymousClassConstructorCall();
    }
    return super.eGet(featureID, resolve, coreType);
}
/**
 * Returns the effective user session key used in SMB MAC signing and NTLMSSP
 * signing and sealing, or null when only externally supplied hashes are available
 * (a session key cannot be derived from them).
 *
 * @param challenge the server challenge
 * @return a 16-byte session key, or null when hashes are external; on internal
 *         failure the key buffer may remain zeroed (best effort)
 */
public byte[] getUserSessionKey(byte[] challenge) {
    if (hashesExternal) return null;
    byte[] key = new byte[16];
    try {
        // Delegates to the three-argument variant which fills `key` in place.
        getUserSessionKey(challenge, key, 0);
    } catch (Exception ex) {
        // Best effort: log (when logging is enabled) and return the buffer as-is.
        if (log.level > 0) ex.printStackTrace(log);
    }
    return key;
}
/**
 * Generated EMF factory method: converts the literal value into the
 * corresponding {@link IfcMemberTypeEnum} enumerator.
 *
 * @param eDataType the enum data type (used only for the error message)
 * @param initialValue the literal to convert
 * @return the matching enumerator
 * @throws IllegalArgumentException if the literal is not a valid enumerator
 * @generated
 */
public IfcMemberTypeEnum createIfcMemberTypeEnumFromString(EDataType eDataType, String initialValue) {
    IfcMemberTypeEnum result = IfcMemberTypeEnum.get(initialValue);
    if (result == null)
        throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
}
/**
 * Internal lookup: walks webservices.xml and returns the PortComponent or
 * WebserviceDescription whose service-impl-bean link (servlet-link or ejb-link,
 * chosen by {@code linkType}) equals the given port link.
 * Only supports {@code clazz} of PortComponent.class or WebserviceDescription.class;
 * any other class, a null port link, or a missing deployment descriptor yields null.
 *
 * @param portLink the servlet-link or ejb-link value to match
 * @param containerToAdapt the container providing the Webservices descriptor
 * @param clazz the element type to return (PortComponent or WebserviceDescription)
 * @param linkType whether to match against servlet links or EJB links
 * @return the matching element, or null if none matches
 * @throws UnableToAdaptException if the descriptor cannot be adapted
 */
@SuppressWarnings("unchecked")
private static <T> T getHighLevelElementByServiceImplBean(String portLink, Adaptable containerToAdapt, Class<T> clazz, LinkType linkType) throws UnableToAdaptException {
    if (null == portLink) {
        return null;
    }
    if (PortComponent.class.isAssignableFrom(clazz) || WebserviceDescription.class.isAssignableFrom(clazz)) {
        Webservices wsXml = containerToAdapt.adapt(Webservices.class);
        if (null == wsXml) {
            // No webservices.xml deployment descriptor present.
            return null;
        }
        for (WebserviceDescription wsDes : wsXml.getWebServiceDescriptions()) {
            if (wsDes.getPortComponents().size() == 0) {
                continue;
            }
            for (PortComponent portCmpt : wsDes.getPortComponents()) {
                ServiceImplBean servImplBean = portCmpt.getServiceImplBean();
                // Select the link flavor to compare based on the requested link type.
                String serviceLink = LinkType.SERVLET == linkType ? servImplBean.getServletLink() : servImplBean.getEJBLink();
                if (serviceLink == null) {
                    continue;
                } else if (serviceLink.equals(portLink)) {
                    // Return either the port itself or its enclosing description.
                    if (PortComponent.class.isAssignableFrom(clazz)) {
                        return (T) portCmpt;
                    } else {
                        return (T) wsDes;
                    }
                }
            }
        }
        return null;
    }
    return null;
}
/**
 * ANTLR-generated lexer rule NEWLINE: matches CRLF, LF, or a lone CR.
 * Do not hand-edit the DFA decision logic.
 *
 * $ANTLR start "NEWLINE"
 */
public final void mNEWLINE() throws RecognitionException {
    try {
        int _type = NEWLINE;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // BELScript.g:297:8: ( ( '\\u000d' )? '\\u000a' | '\\u000d' )
        int alt11 = 2;
        int LA11_0 = input.LA(1);
        if ((LA11_0 == '\r')) {
            // CR seen: decide between CRLF (alt 1) and lone CR (alt 2) by lookahead.
            int LA11_1 = input.LA(2);
            if ((LA11_1 == '\n')) {
                alt11 = 1;
            } else {
                alt11 = 2;
            }
        } else if ((LA11_0 == '\n')) {
            alt11 = 1;
        } else {
            NoViableAltException nvae = new NoViableAltException("", 11, 0, input);
            throw nvae;
        }
        switch (alt11) {
            case 1:
                // BELScript.g:298:5: ( '\\u000d' )? '\\u000a'
                {
                    // BELScript.g:298:5: ( '\\u000d' )?
                    int alt10 = 2;
                    int LA10_0 = input.LA(1);
                    if ((LA10_0 == '\r')) {
                        alt10 = 1;
                    }
                    switch (alt10) {
                        case 1:
                            // BELScript.g:298:5: '\\u000d'
                            {
                                match('\r');
                            }
                            break;
                    }
                    match('\n');
                }
                break;
            case 2:
                // BELScript.g:298:26: '\\u000d'
                {
                    match('\r');
                }
                break;
        }
        state.type = _type;
        state.channel = _channel;
    } finally {
    }
}
public class OutlookMessageParser { /** * Reads the bytes from the stream to a byte array . * @ param dstream The stream to be read from . * @ return An array of bytes . * @ throws IOException If the stream cannot be read properly . */ private byte [ ] getBytesFromStream ( final InputStream dstream ) throws IOException { } }
final ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; final byte [ ] buffer = new byte [ 1024 ] ; int read ; while ( ( read = dstream . read ( buffer ) ) > 0 ) { baos . write ( buffer , 0 , read ) ; } return baos . toByteArray ( ) ;
public class BOSHClient { /** * Finds < tt > RequestProcessor < / tt > which has claimed given exchange . * @ param exch the < tt > HTTPExchange < / tt > for which < tt > RequestProcessor < / tt > * is to be found . * @ return < tt > { @ link RequestProcessor } < / tt > that has claimed given * < tt > HTTPExchange < / tt > or < tt > null < / tt > if the exchange has not been * claimed yet . */ private RequestProcessor findProcessorForExchange ( HTTPExchange exch ) { } }
assertLocked ( ) ; for ( RequestProcessor reqProc : procThreads ) { if ( exch == reqProc . procExchange ) return reqProc ; } return null ;
public class MapBasedDataMaster { /** * fixme - should be package - private */ public void readResources ( String path ) throws IOException { } }
Enumeration < URL > resources = getClass ( ) . getClassLoader ( ) . getResources ( path ) ; if ( ! resources . hasMoreElements ( ) ) { throw new IllegalArgumentException ( String . format ( "File %s was not found on classpath" , path ) ) ; } Yaml yaml = new Yaml ( ) ; while ( resources . hasMoreElements ( ) ) { appendData ( yaml . loadAs ( resources . nextElement ( ) . openStream ( ) , Data . class ) ) ; }
public class Compositions { /** * Composes an iterator of endofunctions . * @ param < T > the functions parameter and result type * @ param endodelegates to be composed ( e . g : f , g , h ) * @ return a function performing f ° g ° h */ public static < T > UnaryOperator < T > compose ( Iterator < Function < T , T > > endodelegates ) { } }
return new UnaryOperatorsComposer < T > ( ) . apply ( endodelegates ) ;
public class IdlToDSMojo { /** * Find , open and parse the IDD implied by the specified service name . Reads either an explicit * file or else a resource based on { @ link # iddAsResource } flag . * The implied name is simply the service name + " . xml " . */ private Document parseIddFromString ( String iddContent ) { } }
return XmlUtil . parse ( new ByteArrayInputStream ( iddContent . getBytes ( ) ) , resolver ) ;
public class FacebookSignatureUtil { /** * Out of the passed in < code > reqParams < / code > , extracts the parameters that * are known FacebookParams and returns them . * @ param reqParams a map of request parameters to their values * @ return a map suitable for being passed to verify signature */ public static EnumMap < FacebookParam , CharSequence > extractFacebookParams ( Map < CharSequence , CharSequence > reqParams ) { } }
if ( null == reqParams ) return null ; EnumMap < FacebookParam , CharSequence > result = new EnumMap < FacebookParam , CharSequence > ( FacebookParam . class ) ; for ( Map . Entry < CharSequence , CharSequence > entry : reqParams . entrySet ( ) ) { FacebookParam matchingFacebookParam = FacebookParam . get ( entry . getKey ( ) . toString ( ) ) ; if ( null != matchingFacebookParam ) { result . put ( matchingFacebookParam , entry . getValue ( ) ) ; } } return result ;