signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PreprocessorContext { /** * Check that there is a local variable with such name . * @ param variableName a name to be checked , can be null * @ return false if there is not such variable or it is null , true if such local variable exists */ public boolean isLocalVariable ( @ Nullable final String variableName ) { } }
boolean result = false ; if ( variableName != null ) { final String normalized = PreprocessorUtils . normalizeVariableName ( variableName ) ; result = this . localVarTable . containsKey ( normalized ) ; } return result ;
public class TreeInfo { /** * Find the declaration for a symbol , where * that symbol is defined somewhere in the given tree . */ public static JCTree declarationFor ( final Symbol sym , final JCTree tree ) { } }
class DeclScanner extends TreeScanner { JCTree result = null ; public void scan ( JCTree tree ) { if ( tree != null && result == null ) tree . accept ( this ) ; } public void visitTopLevel ( JCCompilationUnit that ) { if ( that . packge == sym ) result = that ; else super . visitTopLevel ( that ) ; } public void visitClassDef ( JCClassDecl that ) { if ( that . sym == sym ) result = that ; else super . visitClassDef ( that ) ; } public void visitMethodDef ( JCMethodDecl that ) { if ( that . sym == sym ) result = that ; else super . visitMethodDef ( that ) ; } public void visitVarDef ( JCVariableDecl that ) { if ( that . sym == sym ) result = that ; else super . visitVarDef ( that ) ; } public void visitTypeParameter ( JCTypeParameter that ) { if ( that . type != null && that . type . tsym == sym ) result = that ; else super . visitTypeParameter ( that ) ; } } DeclScanner s = new DeclScanner ( ) ; tree . accept ( s ) ; return s . result ;
public class Conversation { /** * Returns < code > true < / code > if conversation is in one of the given states */ public boolean hasState ( ConversationState ... states ) { } }
for ( ConversationState s : states ) { if ( s . equals ( state ) ) { return true ; } } return false ;
public class nspbr { /** * Use this API to update nspbr resources . */ public static base_responses update ( nitro_service client , nspbr resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { nspbr updateresources [ ] = new nspbr [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new nspbr ( ) ; updateresources [ i ] . name = resources [ i ] . name ; updateresources [ i ] . action = resources [ i ] . action ; updateresources [ i ] . srcip = resources [ i ] . srcip ; updateresources [ i ] . srcipop = resources [ i ] . srcipop ; updateresources [ i ] . srcipval = resources [ i ] . srcipval ; updateresources [ i ] . srcport = resources [ i ] . srcport ; updateresources [ i ] . srcportop = resources [ i ] . srcportop ; updateresources [ i ] . srcportval = resources [ i ] . srcportval ; updateresources [ i ] . destip = resources [ i ] . destip ; updateresources [ i ] . destipop = resources [ i ] . destipop ; updateresources [ i ] . destipval = resources [ i ] . destipval ; updateresources [ i ] . destport = resources [ i ] . destport ; updateresources [ i ] . destportop = resources [ i ] . destportop ; updateresources [ i ] . destportval = resources [ i ] . destportval ; updateresources [ i ] . nexthop = resources [ i ] . nexthop ; updateresources [ i ] . nexthopval = resources [ i ] . nexthopval ; updateresources [ i ] . iptunnel = resources [ i ] . iptunnel ; updateresources [ i ] . iptunnelname = resources [ i ] . iptunnelname ; updateresources [ i ] . srcmac = resources [ i ] . srcmac ; updateresources [ i ] . protocol = resources [ i ] . protocol ; updateresources [ i ] . protocolnumber = resources [ i ] . protocolnumber ; updateresources [ i ] . vlan = resources [ i ] . vlan ; updateresources [ i ] . Interface = resources [ i ] . Interface ; updateresources [ i ] . priority = resources [ i ] . priority ; updateresources [ i ] . msr = resources [ i ] . msr ; updateresources [ i ] . monitor = resources [ i ] . monitor ; } result = update_bulk_request ( client , updateresources ) ; } return result ;
public class FinalParameters { /** * overrides the visitor to find the source lines for the method header , to find non final parameters * @ param obj * the code object for the currently parsed method */ @ Override public void visitCode ( final Code obj ) { } }
if ( sourceLines == null ) { return ; } if ( isAbstract ) { return ; } if ( Values . STATIC_INITIALIZER . equals ( methodName ) || Values . CONSTRUCTOR . equals ( methodName ) ) { return ; } int methodStart = srcLineAnnotation . getStartLine ( ) - 2 ; int methodLine = methodStart ; String line ; while ( ( methodLine >= 0 ) && ( methodLine < sourceLines . length ) ) { line = sourceLines [ methodLine ] ; if ( line . indexOf ( methodName ) >= 0 ) { break ; } methodLine -- ; } if ( methodLine < 0 ) { return ; } for ( int i = methodLine ; i <= methodStart ; i ++ ) { if ( ( i < 0 ) || ( i >= sourceLines . length ) ) { return ; } line = sourceLines [ i ] ; if ( line . indexOf ( "final" ) >= 0 ) { return ; } } changedParms = new BitSet ( ) ; super . visitCode ( obj ) ; BugInstance bi = null ; for ( int i = 0 ; i < firstLocalReg ; i ++ ) { if ( changedParms . get ( i ) ) { changedParms . clear ( i ) ; continue ; } String parmName = getRegisterName ( obj , i ) ; if ( bi == null ) { bi = new BugInstance ( this , BugType . FP_FINAL_PARAMETERS . name ( ) , LOW_PRIORITY ) . addClass ( this ) . addMethod ( this ) . addSourceLine ( this , 0 ) ; bugReporter . reportBug ( bi ) ; } bi . addString ( parmName ) ; } changedParms = null ;
public class PreferenceFragment { /** * Initializes the edit text dialog . */ private void initializeEditTextDialog ( ) { } }
EditTextDialog . Builder builder = new EditTextDialog . Builder ( getActivity ( ) ) . setHint ( R . string . edit_text_dialog_hint ) . setText ( getContext ( ) . getString ( R . string . edit_text_dialog_text ) ) . addValidator ( Validators . notEmpty ( getContext ( ) ) ) ; configureHeaderDialogBuilder ( builder ) ; configureButtonBarDialogBuilder ( builder ) ; editTextDialog = builder . create ( ) ;
public class JSDocInfo { /** * Gets the @ template type names . * < p > Excludes @ template types from TTL ; get those with { @ link # getTypeTransformations ( ) } */ public ImmutableList < String > getTemplateTypeNames ( ) { } }
if ( info == null || info . templateTypeNames == null ) { return ImmutableList . of ( ) ; } return ImmutableList . copyOf ( info . templateTypeNames ) ;
public class CmsPublishProject { /** * Returns the project name . < p > * @ return the project name */ private String getProjectname ( ) { } }
CmsUUID id = new CmsUUID ( getParamProjectid ( ) ) ; try { return getCms ( ) . readProject ( id ) . getName ( ) ; } catch ( CmsException e ) { LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_SET_PROJECT_NAME_FAILED_0 ) , e ) ; } return "-error-" ;
public class StringAccessor { /** * ( non - Javadoc ) * @ see * com . impetus . kundera . property . PropertyAccessor # toBytes ( java . lang . Object ) */ @ Override public final byte [ ] toBytes ( Object s ) throws PropertyAccessException { } }
try { return s != null ? ( ( String ) s ) . getBytes ( Constants . CHARSET_UTF8 ) : null ; } catch ( UnsupportedEncodingException e ) { log . error ( "Unsupported encoding exception, Caused by {}." , e ) ; throw new PropertyAccessException ( e ) ; }
public class ProgressTableModel { /** * / * ( non - Javadoc ) * @ see javax . swing . table . TableModel # getValueAt ( int , int ) */ public synchronized Object getValueAt ( int rowIndex , int columnIndex ) { } }
TableRowProgressMonitor monitor = monitors . get ( rowIndex ) ; switch ( columnIndex ) { case TITLE_COLUMN : return monitor . getTitle ( ) ; case PROGRESS_COLUMN : return monitor . getProgressBar ( ) ; case STATUS_COLUMN : return monitor . getStatus ( ) ; default : throw new IndexOutOfBoundsException ( ) ; }
public class CharacterBasedSegment { /** * 将中间结果转换为词网顶点 , * 这样就可以利用基于Vertex开发的功能 , 如词性标注 、 NER等 * @ param wordList * @ param appendStart * @ return */ protected List < Vertex > toVertexList ( List < Term > wordList , boolean appendStart ) { } }
ArrayList < Vertex > vertexList = new ArrayList < Vertex > ( wordList . size ( ) + 2 ) ; if ( appendStart ) vertexList . add ( Vertex . newB ( ) ) ; for ( Term word : wordList ) { CoreDictionary . Attribute attribute = guessAttribute ( word ) ; Vertex vertex = new Vertex ( word . word , attribute ) ; vertexList . add ( vertex ) ; } if ( appendStart ) vertexList . add ( Vertex . newE ( ) ) ; return vertexList ;
public class BeanPropertyTypeProvider { /** * set the property destination type for given property * @ param propertyName * @ param destinationType */ public void setPropertyDestinationType ( Class < ? > clazz , String propertyName , TypeReference < ? > destinationType ) { } }
propertiesDestinationTypes . put ( new ClassProperty ( clazz , propertyName ) , destinationType ) ;
public class ServiceTrxMessageIn { /** * Get the SOAP message body as a DOM node . * @ param message the SOAP message . * @ param bReturnCopy Return a copy of the message node , instead of the actual node . * @ return The DOM node containing the message body . */ public org . w3c . dom . Node getMessageBody ( Object rawData , boolean bReturnCopy ) { } }
return super . getMessageBody ( rawData , bReturnCopy ) ;
public class OrmWriter { private static PreparedStatement createStatementForInsert ( final Connection connection , final Introspected introspected , final FieldColumnInfo [ ] fcInfos ) throws SQLException { } }
final String sql = createStatementCache . computeIfAbsent ( introspected , key -> { final String tableName = introspected . getTableName ( ) ; final StringBuilder sqlSB = new StringBuilder ( "INSERT INTO " ) . append ( tableName ) . append ( '(' ) ; final StringBuilder sqlValues = new StringBuilder ( ") VALUES (" ) ; for ( final FieldColumnInfo fcInfo : fcInfos ) { sqlSB . append ( fcInfo . getDelimitedColumnName ( ) ) . append ( ',' ) ; sqlValues . append ( "?," ) ; } sqlValues . deleteCharAt ( sqlValues . length ( ) - 1 ) ; sqlSB . deleteCharAt ( sqlSB . length ( ) - 1 ) . append ( sqlValues ) . append ( ')' ) ; return sqlSB . toString ( ) ; } ) ; if ( introspected . hasGeneratedId ( ) ) { return connection . prepareStatement ( sql , introspected . getIdColumnNames ( ) ) ; } else { return connection . prepareStatement ( sql ) ; }
public class FastAdapterDialog { /** * Define the OnClickListener which will be used for a single item * @ param onClickListener the OnClickListener which will be used for a single item * @ return this */ public FastAdapterDialog < Item > withOnClickListener ( com . mikepenz . fastadapter . listeners . OnClickListener < Item > onClickListener ) { } }
this . mFastAdapter . withOnClickListener ( onClickListener ) ; return this ;
public class CPDefinitionUtil { /** * Returns the cp definition where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache . * @ param uuid the uuid * @ param groupId the group ID * @ param retrieveFromCache whether to retrieve from the finder cache * @ return the matching cp definition , or < code > null < / code > if a matching cp definition could not be found */ public static CPDefinition fetchByUUID_G ( String uuid , long groupId , boolean retrieveFromCache ) { } }
return getPersistence ( ) . fetchByUUID_G ( uuid , groupId , retrieveFromCache ) ;
public class PassiveRole { /** * Handles an AppendRequest . */ protected CompletableFuture < AppendResponse > handleAppend ( final AppendRequest request ) { } }
CompletableFuture < AppendResponse > future = new CompletableFuture < > ( ) ; // Check that the term of the given request matches the local term or update the term . if ( ! checkTerm ( request , future ) ) { return future ; } // Check that the previous index / term matches the local log ' s last entry . if ( ! checkPreviousEntry ( request , future ) ) { return future ; } // Append the entries to the log . appendEntries ( request , future ) ; return future ;
public class Caffeine { /** * Constructs a new { @ code Caffeine } instance with the settings specified in { @ code spec } . * @ param spec a String in the format specified by { @ link CaffeineSpec } * @ return a new instance with the specification ' s settings */ @ NonNull public static Caffeine < Object , Object > from ( String spec ) { } }
return from ( CaffeineSpec . parse ( spec ) ) ;
public class ApptentiveNestedScrollView { /** * < p > The scroll range of a scroll view is the overall height of all of its * children . < / p > * @ hide */ @ Override public int computeVerticalScrollRange ( ) { } }
final int count = getChildCount ( ) ; final int contentHeight = getHeight ( ) - getPaddingBottom ( ) - getPaddingTop ( ) ; if ( count == 0 ) { return contentHeight ; } int scrollRange = getChildAt ( 0 ) . getBottom ( ) ; final int scrollY = getScrollY ( ) ; final int overscrollBottom = Math . max ( 0 , scrollRange - contentHeight ) ; if ( scrollY < 0 ) { scrollRange -= scrollY ; } else if ( scrollY > overscrollBottom ) { scrollRange += scrollY - overscrollBottom ; } return scrollRange ;
public class OutboundHandler { /** * Initializes the dst ByteBuffer with the configured size . * The buffer created is reading mode . * @ param sizeBytes the size of the dst ByteBuffer . * @ param bytes the bytes added to the buffer . Can be null if nothing * should be added . * @ throws IllegalArgumentException if the size of the buffer is too small . */ protected final void initDstBuffer ( int sizeBytes , byte [ ] bytes ) { } }
if ( bytes != null && bytes . length > sizeBytes ) { throw new IllegalArgumentException ( "Buffer overflow. Can't initialize dstBuffer for " + this + " and channel" + channel + " because too many bytes, sizeBytes " + sizeBytes + ". bytes.length " + bytes . length ) ; } ChannelOptions config = channel . options ( ) ; ByteBuffer buffer = newByteBuffer ( sizeBytes , config . getOption ( DIRECT_BUF ) ) ; if ( bytes != null ) { buffer . put ( bytes ) ; } buffer . flip ( ) ; dst = ( D ) buffer ;
public class OpenCms { /** * Notify all event listeners that a particular event has occurred . < p > * The event will be given to all registered < code > { @ link I _ CmsEventListener } < / code > objects . < p > * @ param type event type * @ param data event data */ public static void fireCmsEvent ( int type , Map < String , Object > data ) { } }
OpenCmsCore . getInstance ( ) . getEventManager ( ) . fireEvent ( type , data ) ;
public class OperationsInner { /** * Lists all of the available Azure Container Registry REST API operations . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; OperationDefinitionInner & gt ; object */ public Observable < Page < OperationDefinitionInner > > listNextAsync ( final String nextPageLink ) { } }
return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < OperationDefinitionInner > > , Page < OperationDefinitionInner > > ( ) { @ Override public Page < OperationDefinitionInner > call ( ServiceResponse < Page < OperationDefinitionInner > > response ) { return response . body ( ) ; } } ) ;
public class HashAlgorithmRegistry { /** * Registers provided { @ link HashAlgorithm } instance with the given name . Name * is not case sensitive . Any registered algorithm with the same name will be * substituted * @ param name name of the algorithm * @ param alg algorithm instance to register */ public static synchronized void registerHashAlgorithm ( String name , HashAlgorithm alg ) { } }
validateName ( name ) ; validateAlgorithm ( alg ) ; REGISTRY . put ( name . toLowerCase ( ) , alg ) ;
public class GcsConfiguration { private static String defaultProject ( ) { } }
try { File configFile ; if ( getEnvironment ( ) . containsKey ( "CLOUDSDK_CONFIG" ) ) { configFile = new File ( getEnvironment ( ) . get ( "CLOUDSDK_CONFIG" ) , "properties" ) ; } else if ( isWindows ( ) && getEnvironment ( ) . containsKey ( "APPDATA" ) ) { configFile = new File ( getEnvironment ( ) . get ( "APPDATA" ) , "gcloud/properties" ) ; } else { // New versions of gcloud use this file configFile = new File ( System . getProperty ( "user.home" ) , ".config/gcloud/configurations/config_default" ) ; if ( ! configFile . exists ( ) ) { // Old versions of gcloud use this file configFile = new File ( System . getProperty ( "user.home" ) , ".config/gcloud/properties" ) ; } } String section = null ; Pattern projectPattern = Pattern . compile ( "^project\\s*=\\s*(.*)$" ) ; Pattern sectionPattern = Pattern . compile ( "^\\[(.*)\\]$" ) ; for ( String line : Files . readLines ( configFile , StandardCharsets . UTF_8 ) ) { line = line . trim ( ) ; if ( line . isEmpty ( ) || line . startsWith ( ";" ) ) { continue ; } Matcher matcher = sectionPattern . matcher ( line ) ; if ( matcher . matches ( ) ) { section = matcher . group ( 1 ) ; } else if ( section == null || section . equals ( "core" ) ) { matcher = projectPattern . matcher ( line ) ; if ( matcher . matches ( ) ) { String project = matcher . group ( 1 ) . trim ( ) ; LOG . debug ( "Inferred default GCP project '{}' from gcloud." , project ) ; return project ; } } } } catch ( IOException e ) { LOG . error ( "Failed to infer default project." , e ) ; } // return null if can ' t determine return null ;
public class TypeVariables { /** * Returns a map from methods to return types , where the return types are not necessarily the * original return types of the methods . Consider this example : * < pre > * & # 64 ; AutoValue class { @ code Foo < T > } { * abstract T getFoo ( ) ; * & # 64 ; AutoValue . Builder * abstract class { @ code Builder < T > } { * abstract Builder setFoo ( T t ) ; * abstract { @ code Foo < T > } build ( ) ; * < / pre > * We want to be able to check that the parameter type of { @ code setFoo } is the same as the * return type of { @ code getFoo } . But in fact it isn ' t , because the { @ code T } of { @ code Foo < T > } * is not the same as the { @ code T } of { @ code Foo . Builder < T > } . So we create a parallel * { @ code Foo < T > } where the { @ code T } < i > is < / i > the one from { @ code Foo . Builder < T > } . That way the * types do correspond . This method then returns the return types of the given methods as they * appear in that parallel class , meaning the type given for { @ code getFoo ( ) } is the { @ code T } of * { @ code Foo . Builder < T > } . * < p > We do the rewrite this way around ( applying the type parameter from { @ code Foo . Builder } to * { @ code Foo } ) because if we hit one of the historical Eclipse bugs with { @ link Types # asMemberOf } * then { @ link EclipseHack # methodReturnType } can use fallback logic , which only works for methods * with no arguments . * @ param methods the methods whose return types are to be rewritten . * @ param sourceType the class containing those methods ( { @ code Foo } in the example ) . * @ param targetType the class to translate the methods into ( { @ code Foo . Builder < T > } ) in the * example . */ static ImmutableMap < ExecutableElement , TypeMirror > rewriteReturnTypes ( Elements elementUtils , Types typeUtils , Collection < ExecutableElement > methods , TypeElement sourceType , TypeElement targetType ) { } }
List < ? extends TypeParameterElement > sourceTypeParameters = sourceType . getTypeParameters ( ) ; List < ? extends TypeParameterElement > targetTypeParameters = targetType . getTypeParameters ( ) ; Preconditions . checkArgument ( sourceTypeParameters . toString ( ) . equals ( targetTypeParameters . toString ( ) ) , "%s != %s" , sourceTypeParameters , targetTypeParameters ) ; // What we ' re doing is only valid if the type parameters are " the same " . The check here even // requires the names to be the same . The logic would still work without that , but we impose // that requirement elsewhere and it means we can check in this simple way . EclipseHack eclipseHack = new EclipseHack ( elementUtils , typeUtils ) ; TypeMirror [ ] targetTypeParameterMirrors = new TypeMirror [ targetTypeParameters . size ( ) ] ; for ( int i = 0 ; i < targetTypeParameters . size ( ) ; i ++ ) { targetTypeParameterMirrors [ i ] = targetTypeParameters . get ( i ) . asType ( ) ; } DeclaredType parallelSource = typeUtils . getDeclaredType ( sourceType , targetTypeParameterMirrors ) ; return methods . stream ( ) . collect ( ImmutableMap . toImmutableMap ( m -> m , m -> eclipseHack . methodReturnType ( m , parallelSource ) ) ) ;
public class StorageClientFactory { /** * Creates a SnowflakeAzureClientObject which encapsulates * the Azure Storage client * @ param stage Stage information * @ param encMat encryption material for the client * @ return the SnowflakeS3Client instance created */ private SnowflakeAzureClient createAzureClient ( StageInfo stage , RemoteStoreFileEncryptionMaterial encMat ) throws SnowflakeSQLException { } }
logger . debug ( "createAzureClient encryption={}" , ( encMat == null ? "no" : "yes" ) ) ; // TODO : implement support for encryption SNOW - 33042 SnowflakeAzureClient azureClient ; try { azureClient = SnowflakeAzureClient . createSnowflakeAzureClient ( stage , encMat ) ; } catch ( Exception ex ) { logger . debug ( "Exception creating Azure Storage client" , ex ) ; throw ex ; } logger . debug ( "Azure Storage client created" ) ; return azureClient ;
public class JCloudsStorage { /** * { @ inheritDoc } */ @ Override public IBackendReader getReader ( ) throws TTException { } }
// setup the container name used by the provider ( like bucket in S3) return new JCloudsReader ( mBlobStore , mFac , mByteHandler , mProperties . getProperty ( ConstructorProps . RESOURCE ) ) ;
public class aaagroup_auditsyslogpolicy_binding { /** * Use this API to fetch aaagroup _ auditsyslogpolicy _ binding resources of given name . */ public static aaagroup_auditsyslogpolicy_binding [ ] get ( nitro_service service , String groupname ) throws Exception { } }
aaagroup_auditsyslogpolicy_binding obj = new aaagroup_auditsyslogpolicy_binding ( ) ; obj . set_groupname ( groupname ) ; aaagroup_auditsyslogpolicy_binding response [ ] = ( aaagroup_auditsyslogpolicy_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class JsonSerializationContext { /** * Trace an error with current writer state and returns a corresponding exception . * @ param value current value * @ param cause cause of the error * @ param writer current writer * @ return a { @ link JsonSerializationException } if we wrap the exceptions , the cause otherwise */ public RuntimeException traceError ( Object value , RuntimeException cause , JsonWriter writer ) { } }
RuntimeException exception = traceError ( value , cause ) ; traceWriterInfo ( value , writer ) ; return exception ;
public class ServiceAdmin { @ GET @ Path ( "node" ) @ Produces ( MediaType . APPLICATION_JSON ) @ HttpCache ( "public, max-age=60" ) public List < NodeDto > getNodes ( ) { } }
DbConn cnx = null ; try { cnx = Helpers . getDbSession ( ) ; return MetaService . getNodes ( cnx ) ; } finally { Helpers . closeQuietly ( cnx ) ; }
public class MouseHeadless { /** * Updatable */ @ Override public void update ( double extrp ) { } }
mx = x - oldX ; my = y - oldY ; oldX = x ; oldY = y ;
public class SDVariable { /** * The gradient variable is the variable that represents the derivative of the loss function with respect * to the output of this variable . I . e . , if this variable is X and loss function is L , then gradient ( ) returns the * variable representing dL / dX < br > * Note that only floating point variables can have gradients . < br > * Note also that a gradient may not yet be defined , and / or if no loss function variables have been set . < br > * You can set the loss function variables using { @ link SameDiff # setLossVariables ( String . . . ) } and then create the * gradient functions using { @ link SameDiff # createGradFunction ( ) } . Alternatively , the gradient function will be * created automatically when training is performed . */ public SDVariable getGradient ( ) { } }
Preconditions . checkState ( dataType ( ) . isFPType ( ) , "Cannot get gradient of %s variable \"%s\": only floating" + " point variables have gradients" , getVarName ( ) , dataType ( ) ) ; return sameDiff . getGradForVariable ( getVarName ( ) ) ;
public class ZipUtils { /** * Encodes the given byte array and then GZIP compresses it . * @ param value byte array input * @ return compressed byte array output * @ throws IOException */ public static byte [ ] compressBytesNonBase64 ( byte [ ] value ) throws IOException { } }
ByteArrayOutputStream baos = new ByteArrayOutputStream ( value . length ) ; try ( GZIPOutputStream gos = new GZIPOutputStream ( baos ) ) { gos . write ( value ) ; } byte [ ] compressed = baos . toByteArray ( ) ; baos . close ( ) ; return compressed ;
public class FilesystemIterator { /** * Determines if a path is a possible file system root */ protected boolean isFilesystemRoot ( String filename ) throws IOException { } }
String [ ] roots = getFilesystemRoots ( ) ; for ( int c = 0 , len = roots . length ; c < len ; c ++ ) { if ( roots [ c ] . equals ( filename ) ) return true ; } return false ;
public class EventRegister { /** * Register event bus for views . */ void registerEventBuses ( ) { } }
if ( ! eventsRegistered ) { Mvc . graph ( ) . inject ( this ) ; eventBusV . register ( androidComponent ) ; eventsRegistered = true ; logger . trace ( "+Event2V bus registered for view - '{}'." , androidComponent . getClass ( ) . getSimpleName ( ) ) ; } else { logger . trace ( "!Event2V bus already registered for view - '{}' and its controllers." , androidComponent . getClass ( ) . getSimpleName ( ) ) ; }
public class TripleGeneratorBase { /** * Helper methods for adding triples */ protected void add ( SubjectNode subject , RDFName predicate , ObjectNode object , Set < Triple > set ) throws ResourceIndexException { } }
set . add ( new SimpleTriple ( subject , predicate , object ) ) ;
public class AppendBlockHeader { public void write ( DataOutput out ) throws IOException { } }
if ( getDataTransferVersion ( ) >= DataTransferProtocol . FEDERATION_VERSION ) { out . writeInt ( namespaceId ) ; } out . writeLong ( blockId ) ; out . writeLong ( numBytes ) ; out . writeLong ( genStamp ) ; out . writeInt ( pipelineDepth ) ; getWritePipelineInfo ( ) . write ( getDataTransferVersion ( ) , out ) ;
public class ipset { /** * Use this API to add ipset . */ public static base_response add ( nitro_service client , ipset resource ) throws Exception { } }
ipset addresource = new ipset ( ) ; addresource . name = resource . name ; addresource . td = resource . td ; return addresource . add_resource ( client ) ;
public class CmsVfsSitemapService { /** * Returns the sitemap children for the given path with all descendants up to the given level or to the given target path , ie . * < dl > < dt > levels = 1 < dd > only children < dt > levels = 2 < dd > children and great children < / dl > * and so on . < p > * @ param root the site relative root * @ param levels the levels to recurse * @ param targetPath the target path * @ return the sitemap children */ private List < CmsClientSitemapEntry > getChildren ( String root , int levels , String targetPath ) { } }
List < CmsClientSitemapEntry > children = new ArrayList < CmsClientSitemapEntry > ( ) ; int i = 0 ; for ( CmsJspNavElement navElement : getNavBuilder ( ) . getNavigationForFolder ( root , Visibility . all , CmsResourceFilter . ONLY_VISIBLE_NO_DELETED ) ) { try { CmsClientSitemapEntry child = toClientEntry ( navElement , false ) ; if ( child != null ) { child . setPosition ( i ) ; children . add ( child ) ; int nextLevels = levels ; if ( ( nextLevels == 2 ) && ( targetPath != null ) && targetPath . startsWith ( child . getSitePath ( ) ) ) { nextLevels = 3 ; } if ( child . isFolderType ( ) && ( ( nextLevels > 1 ) || ( nextLevels == - 1 ) ) && ! isSubSitemap ( navElement ) ) { child . setSubEntries ( getChildren ( child . getSitePath ( ) , nextLevels - 1 , targetPath ) , null ) ; child . setChildrenLoadedInitially ( true ) ; } i ++ ; } } catch ( CmsException e ) { LOG . error ( "Could not read sitemap entry." , e ) ; } } return children ;
public class ChisquareSelect { /** * { @ inheritDoc } */ @ Override protected void estimateFeatureScores ( Map < Object , Double > featureScores , int N , Map < Object , Integer > classCounts , Map < List < Object > , Integer > featureClassCounts , Map < Object , Double > featureCounts ) { } }
logger . debug ( "estimateFeatureScores()" ) ; double criticalValue = ContinuousDistributions . chisquareInverseCdf ( knowledgeBase . getTrainingParameters ( ) . getALevel ( ) , 1 ) ; // one degree of freedom because the tables below are 2x2 streamExecutor . forEach ( StreamMethods . stream ( featureCounts . entrySet ( ) . stream ( ) , isParallelized ( ) ) , featureCount -> { Object feature = featureCount . getKey ( ) ; double N1_ = featureCount . getValue ( ) ; // calculate the N1 . ( number of records that has the feature ) double N0_ = N - N1_ ; // also the N0 . ( number of records that DONT have the feature ) double bestScore = Double . NEGATIVE_INFINITY ; DataTable2D contingencyTable = new DataTable2D ( ) ; contingencyTable . put ( 0 , new AssociativeArray ( ) ) ; contingencyTable . put ( 1 , new AssociativeArray ( ) ) ; for ( Map . Entry < Object , Integer > classCount : classCounts . entrySet ( ) ) { Object theClass = classCount . getKey ( ) ; Integer featureClassC = featureClassCounts . get ( Arrays . asList ( feature , theClass ) ) ; double N11 = ( featureClassC != null ) ? featureClassC . doubleValue ( ) : 0.0 ; // N11 is the number of records that have the feature and belong on the specific class double N01 = classCount . getValue ( ) - N11 ; // N01 is the total number of records that do not have the particular feature BUT they belong to the specific class double N00 = N0_ - N01 ; double N10 = N1_ - N11 ; contingencyTable . put2d ( 0 , 0 , N00 ) ; contingencyTable . put2d ( 0 , 1 , N01 ) ; contingencyTable . put2d ( 1 , 0 , N10 ) ; contingencyTable . put2d ( 1 , 1 , N11 ) ; double scorevalue = Chisquare . getScoreValue ( contingencyTable ) ; // contingencyTable = null ; if ( scorevalue > bestScore ) { bestScore = scorevalue ; } } if ( bestScore >= criticalValue ) { // if the score is larger than the critical value , then select the feature featureScores . 
put ( feature , bestScore ) ; // This Map is concurrent and there are no overlaping keys between threads } } ) ;
public class CloudFile { /** * 保存文件到云上 , 阻塞直到保存成功返回 * @ param path 文件相对路径 * @ param data 文件内容 * @ return successful / unsuccessful unit response */ @ SuppressWarnings ( "unused" ) public static Completable save ( String path , String data ) { } }
Map < String , Object > map = new HashMap < String , Object > ( ) { { put ( "path" , path ) ; put ( "data" , data ) ; } } ; return SingleRxXian . call ( "cosService" , "cosWrite" , map ) . toCompletable ( ) ;
public class HashUserRealm { public void dump ( PrintStream out ) { } }
out . println ( this + ":" ) ; out . println ( super . toString ( ) ) ; out . println ( _roles ) ;
public class FbBotMillNetworkController { /** * POSTs a messenger code as a JSON string to Facebook . * @ param input * the JSON data to send . * @ return the uploaded attachment ID . */ public static MessengerCode postMessengerCode ( Object input ) { } }
StringEntity stringEntity = toStringEntity ( input ) ; return postMessengerCode ( stringEntity ) ;
public class ZipUtil { /** * Reads the given ZIP file and executes the given action for each given entry . * For each given entry the corresponding input stream is also passed to the action . If you want to stop the loop then throw a ZipBreakException . * @ param zip * input ZIP file . * @ param entryNames * names of entries to iterate * @ param action * action to be called for each entry . * @ param charset * charset used to process the zip file * @ see ZipEntryCallback * @ see # iterate ( File , String [ ] , ZipInfoCallback ) */ public static void iterate ( File zip , String [ ] entryNames , ZipEntryCallback action , Charset charset ) { } }
ZipFile zf = null ; try { if ( charset == null ) { zf = new ZipFile ( zip ) ; } else { zf = new ZipFile ( zip , charset ) ; } for ( int i = 0 ; i < entryNames . length ; i ++ ) { ZipEntry e = zf . getEntry ( entryNames [ i ] ) ; if ( e == null ) { continue ; } InputStream is = zf . getInputStream ( e ) ; try { action . process ( is , e ) ; } catch ( IOException ze ) { throw new ZipException ( "Failed to process zip entry '" + e . getName ( ) + " with action " + action , ze ) ; } catch ( ZipBreakException ex ) { break ; } finally { IOUtils . closeQuietly ( is ) ; } } } catch ( IOException e ) { throw ZipExceptionUtil . rethrow ( e ) ; } finally { closeQuietly ( zf ) ; }
public class JobConfig { /** * < b > WARNING : < / b > You shouldn ' t call this method . It only exists for testing and debugging * purposes . The { @ link JobManager } automatically decides which API suits best for a { @ link Job } . * @ param api The { @ link JobApi } which will be used for future scheduled JobRequests . */ public static void forceApi ( @ NonNull JobApi api ) { } }
for ( JobApi jobApi : JobApi . values ( ) ) { ENABLED_APIS . put ( jobApi , jobApi == api ) ; } CAT . w ( "forceApi - %s" , api ) ;
public class DescribeConditionalForwardersRequest { /** * The fully qualified domain names ( FQDN ) of the remote domains for which to get the list of associated conditional * forwarders . If this member is null , all conditional forwarders are returned . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setRemoteDomainNames ( java . util . Collection ) } or { @ link # withRemoteDomainNames ( java . util . Collection ) } if * you want to override the existing values . * @ param remoteDomainNames * The fully qualified domain names ( FQDN ) of the remote domains for which to get the list of associated * conditional forwarders . If this member is null , all conditional forwarders are returned . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeConditionalForwardersRequest withRemoteDomainNames ( String ... remoteDomainNames ) { } }
if ( this . remoteDomainNames == null ) { setRemoteDomainNames ( new com . amazonaws . internal . SdkInternalList < String > ( remoteDomainNames . length ) ) ; } for ( String ele : remoteDomainNames ) { this . remoteDomainNames . add ( ele ) ; } return this ;
public class AbstractZKClient {
    /**
     * Returns both the data as a byte[] as well as the stat.
     * Delegates to the two-argument overload with a null second argument
     * (presumably "no watcher" - confirm against the overload's contract).
     */
    @Override
    public ZKData<byte[]> getZKByteData(String path) throws InterruptedException, KeeperException {
        return getZKByteData(path, null);
    }
}
public class DependencyFinder {
    /**
     * Parses all class files from the given archive stream and returns all target locations.
     *
     * @param archiveStream archives to scan; fully consumed by this call
     * @return the accumulated locations once all parse tasks have completed
     */
    public Set<Location> parse(Stream<? extends Archive> archiveStream) {
        // Submit one parse per archive; CLASS_FINDER presumably selects .class entries - confirm.
        archiveStream.forEach(archive -> parse(archive, CLASS_FINDER));
        // Block until every submitted task finishes, then return the collected results.
        return waitForTasksCompleted();
    }
}
public class CoverageDataCore { /** * Get the coverage data value for the unsigned short pixel value * @ param griddedTile * gridded tile * @ param unsignedPixelValue * pixel value as an unsigned 16 bit integer * @ return coverage data value */ public Double getValue ( GriddedTile griddedTile , int unsignedPixelValue ) { } }
Double value = null ; if ( ! isDataNull ( unsignedPixelValue ) ) { value = pixelValueToValue ( griddedTile , new Double ( unsignedPixelValue ) ) ; } return value ;
public class Encoder { /** * Re - encode a multimedia file . * This method is not reentrant , instead create multiple object instances * @ param multimediaObject The source multimedia file . It cannot be null . Be * sure this file can be decoded ( see null null null null { @ link Encoder # getSupportedDecodingFormats ( ) } , * { @ link Encoder # getAudioDecoders ( ) } and * { @ link Encoder # getVideoDecoders ( ) } ) . * @ param target The target multimedia re - encoded file . It cannot be null . * If this file already exists , it will be overwrited . * @ param attributes A set of attributes for the encoding process . * @ param listener An optional progress listener for the encoding process . * It can be null . * @ throws IllegalArgumentException If both audio and video parameters are * null . * @ throws InputFormatException If the source multimedia file cannot be * decoded . * @ throws EncoderException If a problems occurs during the encoding * process . */ public void encode ( MultimediaObject multimediaObject , File target , EncodingAttributes attributes , EncoderProgressListener listener ) throws IllegalArgumentException , InputFormatException , EncoderException { } }
String formatAttribute = attributes . getFormat ( ) ; Float offsetAttribute = attributes . getOffset ( ) ; Float durationAttribute = attributes . getDuration ( ) ; AudioAttributes audioAttributes = attributes . getAudioAttributes ( ) ; VideoAttributes videoAttributes = attributes . getVideoAttributes ( ) ; if ( audioAttributes == null && videoAttributes == null ) { throw new IllegalArgumentException ( "Both audio and video attributes are null" ) ; } target = target . getAbsoluteFile ( ) ; target . getParentFile ( ) . mkdirs ( ) ; ffmpeg = locator . createExecutor ( ) ; if ( offsetAttribute != null ) { ffmpeg . addArgument ( "-ss" ) ; ffmpeg . addArgument ( String . valueOf ( offsetAttribute . floatValue ( ) ) ) ; } ffmpeg . addArgument ( "-i" ) ; ffmpeg . addArgument ( multimediaObject . getFile ( ) . getAbsolutePath ( ) ) ; if ( durationAttribute != null ) { ffmpeg . addArgument ( "-t" ) ; ffmpeg . addArgument ( String . valueOf ( durationAttribute . floatValue ( ) ) ) ; } if ( videoAttributes == null ) { ffmpeg . addArgument ( "-vn" ) ; } else { String codec = videoAttributes . getCodec ( ) ; if ( codec != null ) { ffmpeg . addArgument ( "-vcodec" ) ; ffmpeg . addArgument ( codec ) ; } String tag = videoAttributes . getTag ( ) ; if ( tag != null ) { ffmpeg . addArgument ( "-vtag" ) ; ffmpeg . addArgument ( tag ) ; } Integer bitRate = videoAttributes . getBitRate ( ) ; if ( bitRate != null ) { ffmpeg . addArgument ( "-vb" ) ; ffmpeg . addArgument ( String . valueOf ( bitRate . intValue ( ) ) ) ; } Integer frameRate = videoAttributes . getFrameRate ( ) ; if ( frameRate != null ) { ffmpeg . addArgument ( "-r" ) ; ffmpeg . addArgument ( String . valueOf ( frameRate . intValue ( ) ) ) ; } VideoSize size = videoAttributes . getSize ( ) ; if ( size != null ) { ffmpeg . addArgument ( "-s" ) ; ffmpeg . addArgument ( String . valueOf ( size . getWidth ( ) ) + "x" + String . valueOf ( size . getHeight ( ) ) ) ; } if ( videoAttributes . isFaststart ( ) ) { ffmpeg . 
addArgument ( "-movflags" ) ; ffmpeg . addArgument ( "faststart" ) ; } if ( videoAttributes . getX264Profile ( ) != null ) { ffmpeg . addArgument ( "-profile:v" ) ; ffmpeg . addArgument ( videoAttributes . getX264Profile ( ) . getModeName ( ) ) ; } if ( videoAttributes . getVideoFilters ( ) . size ( ) > 0 ) { for ( VideoFilter videoFilter : videoAttributes . getVideoFilters ( ) ) { ffmpeg . addArgument ( "-vf" ) ; ffmpeg . addArgument ( videoFilter . getExpression ( ) ) ; } } Integer quality = videoAttributes . getQuality ( ) ; if ( quality != null ) { ffmpeg . addArgument ( "-qscale:v" ) ; ffmpeg . addArgument ( String . valueOf ( quality . intValue ( ) ) ) ; } } if ( audioAttributes == null ) { ffmpeg . addArgument ( "-an" ) ; } else { String codec = audioAttributes . getCodec ( ) ; if ( codec != null ) { ffmpeg . addArgument ( "-acodec" ) ; ffmpeg . addArgument ( codec ) ; } Integer bitRate = audioAttributes . getBitRate ( ) ; if ( bitRate != null ) { ffmpeg . addArgument ( "-ab" ) ; ffmpeg . addArgument ( String . valueOf ( bitRate . intValue ( ) ) ) ; } Integer channels = audioAttributes . getChannels ( ) ; if ( channels != null ) { ffmpeg . addArgument ( "-ac" ) ; ffmpeg . addArgument ( String . valueOf ( channels . intValue ( ) ) ) ; } Integer samplingRate = audioAttributes . getSamplingRate ( ) ; if ( samplingRate != null ) { ffmpeg . addArgument ( "-ar" ) ; ffmpeg . addArgument ( String . valueOf ( samplingRate . intValue ( ) ) ) ; } Integer volume = audioAttributes . getVolume ( ) ; if ( volume != null ) { ffmpeg . addArgument ( "-vol" ) ; ffmpeg . addArgument ( String . valueOf ( volume . intValue ( ) ) ) ; } Integer quality = audioAttributes . getQuality ( ) ; if ( quality != null ) { ffmpeg . addArgument ( "-qscale:a" ) ; ffmpeg . addArgument ( String . valueOf ( quality . intValue ( ) ) ) ; } } if ( formatAttribute != null ) { ffmpeg . addArgument ( "-f" ) ; ffmpeg . addArgument ( formatAttribute ) ; } ffmpeg . addArgument ( "-y" ) ; ffmpeg . 
addArgument ( target . getAbsolutePath ( ) ) ; if ( attributes . isMapMetaData ( ) ) { // Copy over meta data if possible ffmpeg . addArgument ( "-map_metadata" ) ; ffmpeg . addArgument ( "0" ) ; } // ffmpeg . addArgument ( " - loglevel " ) ; // ffmpeg . addArgument ( " warning " ) ; / / Only report errors try { ffmpeg . execute ( ) ; } catch ( IOException e ) { throw new EncoderException ( e ) ; } try { String lastWarning = null ; long duration ; RBufferedReader reader = new RBufferedReader ( new InputStreamReader ( ffmpeg . getErrorStream ( ) ) ) ; MultimediaInfo info = multimediaObject . getInfo ( ) ; if ( durationAttribute != null ) { duration = ( long ) Math . round ( ( durationAttribute * 1000L ) ) ; } else { duration = info . getDuration ( ) ; if ( offsetAttribute != null ) { duration -= ( long ) Math . round ( ( offsetAttribute * 1000L ) ) ; } } if ( listener != null ) { listener . sourceInfo ( info ) ; } String line ; ConversionOutputAnalyzer outputAnalyzer = new ConversionOutputAnalyzer ( duration , listener ) ; while ( ( line = reader . readLine ( ) ) != null ) { outputAnalyzer . analyzeNewLine ( line ) ; } if ( outputAnalyzer . getLastWarning ( ) != null ) { if ( ! SUCCESS_PATTERN . matcher ( lastWarning ) . matches ( ) ) { throw new EncoderException ( "No match for: " + SUCCESS_PATTERN + " in " + lastWarning ) ; } } unhandledMessages = outputAnalyzer . getUnhandledMessages ( ) ; int exitCode = ffmpeg . getProcessExitCode ( ) ; if ( exitCode != 0 ) { LOG . error ( "Process exit code: " + exitCode + " for " + multimediaObject . getFile ( ) . getName ( ) + " to " + target . getName ( ) ) ; throw new EncoderException ( "Exit code of ffmpeg encoding run is " + exitCode ) ; } } catch ( IOException e ) { throw new EncoderException ( e ) ; } finally { ffmpeg . destroy ( ) ; ffmpeg = null ; }
public class XMLSystemProperties { /** * The same as { @ link # setXMLMaxOccur ( int ) } but just that the value is only * set , if the limit is increased ! * @ param nMaxOccur * A positive integer . Values & le ; 0 are treated as no limit . * @ since 8.6.2 * @ see # setXMLMaxOccur ( int ) */ public static void setXMLMaxOccurIfLarger ( final int nMaxOccur ) { } }
final int nOldValue = getXMLMaxOccur ( ) ; if ( nOldValue > 0 ) { // Current value is limited if ( nMaxOccur <= 0 || nMaxOccur > nOldValue ) { // New value is unlimited or higher setXMLMaxOccur ( nMaxOccur ) ; } } // else - > cannot be increased
public class SnowflakeUtil { /** * Check the error in the JSON node and generate an exception based on * information extracted from the node . * @ param rootNode json object contains error information * @ param raiseReauthenticateError raises SnowflakeReauthenticationRequest * if true * @ throws SnowflakeSQLException the exception get from the error in the json */ static private void checkErrorAndThrowExceptionSub ( JsonNode rootNode , boolean raiseReauthenticateError ) throws SnowflakeSQLException { } }
// no need to throw exception if success if ( rootNode . path ( "success" ) . asBoolean ( ) ) { return ; } String errorMessage ; String sqlState ; int errorCode ; String queryId = "unknown" ; // if we have sqlstate in data , it ' s a sql error if ( ! rootNode . path ( "data" ) . path ( "sqlState" ) . isMissingNode ( ) ) { sqlState = rootNode . path ( "data" ) . path ( "sqlState" ) . asText ( ) ; errorCode = rootNode . path ( "data" ) . path ( "errorCode" ) . asInt ( ) ; queryId = rootNode . path ( "data" ) . path ( "queryId" ) . asText ( ) ; errorMessage = rootNode . path ( "message" ) . asText ( ) ; } else { sqlState = SqlState . INTERNAL_ERROR ; // use internal error sql state // check if there is an error code in the envelope if ( ! rootNode . path ( "code" ) . isMissingNode ( ) ) { errorCode = rootNode . path ( "code" ) . asInt ( ) ; errorMessage = rootNode . path ( "message" ) . asText ( ) ; } else { errorCode = ErrorCode . INTERNAL_ERROR . getMessageCode ( ) ; errorMessage = "no_error_code_from_server" ; try { PrintWriter writer = new PrintWriter ( "output.json" , "UTF-8" ) ; writer . print ( rootNode . toString ( ) ) ; } catch ( Exception ex ) { logger . debug ( "{}" , ex ) ; } } } if ( raiseReauthenticateError ) { switch ( errorCode ) { case ID_TOKEN_EXPIRED_GS_CODE : case SESSION_NOT_EXIST_GS_CODE : case MASTER_TOKEN_NOTFOUND : case MASTER_EXPIRED_GS_CODE : case MASTER_TOKEN_INVALID_GS_CODE : throw new SnowflakeReauthenticationRequest ( queryId , errorMessage , sqlState , errorCode ) ; } } throw new SnowflakeSQLException ( queryId , errorMessage , sqlState , errorCode ) ;
public class ConcurrentHashMap { /** * Retrieve an object from the map based on its key . * @ param key for the Object to be removed . * @ return Object matching the key . */ public final Object get ( Object key ) { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "get" , "key=" + key + "Object" ) ; java . util . Map subMap = getSubMap ( key ) ; Object returnValue ; synchronized ( subMap ) { returnValue = subMap . get ( key ) ; } // synchronized ( subMap ) . if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "get" , "returns retuenVaue=" + returnValue + "(Object)" ) ; return returnValue ;
public class CmsSitemapController { /** * Returns the parent entry of a sitemap entry , or null if it is the root entry . < p > * @ param entry a sitemap entry * @ return the parent entry or null */ public CmsClientSitemapEntry getParentEntry ( CmsClientSitemapEntry entry ) { } }
String path = entry . getSitePath ( ) ; String parentPath = CmsResource . getParentFolder ( path ) ; if ( parentPath == null ) { return null ; } return getEntry ( parentPath ) ;
public class AccessibilityNodeInfoUtils { /** * Determines if the current item is at the edge of a list by checking the * scrollable predecessors of the items on either or both sides . * @ param context The parent context . * @ param node The node to check . * @ param direction The direction in which to check , one of : * < ul > * < li > { @ code - 1 } to check backward * < li > { @ code 0 } to check both backward and forward * < li > { @ code 1 } to check forward * < / ul > * @ param filter ( Optional ) Filter used to validate list - type ancestors . * @ return true if the current item is at the edge of a list . */ public static boolean isEdgeListItem ( Context context , AccessibilityNodeInfoCompat node , int direction , NodeFilter filter ) { } }
if ( node == null ) { return false ; } if ( ( direction <= 0 ) && isMatchingEdgeListItem ( context , node , NodeFocusFinder . SEARCH_BACKWARD , FILTER_SCROLL_BACKWARD . and ( filter ) ) ) { return true ; } if ( ( direction >= 0 ) && isMatchingEdgeListItem ( context , node , NodeFocusFinder . SEARCH_FORWARD , FILTER_SCROLL_FORWARD . and ( filter ) ) ) { return true ; } return false ;
public class DeploymentNode { /** * Adds a container instance to this deployment node , optionally replicating all of the container - container relationships . * @ param container the Container to add an instance of * @ param replicateContainerRelationships true if the container - container relationships should be replicated between the container instances , false otherwise * @ return a ContainerInstance object */ public ContainerInstance add ( Container container , boolean replicateContainerRelationships ) { } }
ContainerInstance containerInstance = getModel ( ) . addContainerInstance ( this , container , replicateContainerRelationships ) ; this . containerInstances . add ( containerInstance ) ; return containerInstance ;
public class Resources { /** * Gets the int . * @ param key the key * @ return the int */ public int getInt ( String key ) { } }
try { return Integer . parseInt ( resource . getString ( key ) ) ; } catch ( NumberFormatException e ) { return - 1 ; // modify here if you need to throw a Exception here }
public class CountStatisticImpl {
    /**
     * Non-Synchronizable: counter is "replaced" with the input value. Caller should synchronize.
     *
     * @param count new counter value (overwrites, does not accumulate)
     * @param startTime statistic window start, in the caller's time base
     * @param lastSampleTime time of the sample that produced {@code count}
     */
    public void set(long count, long startTime, long lastSampleTime) {
        // Plain field writes - no locking or volatile here; visibility is the caller's responsibility.
        this.count = count;
        this.startTime = startTime;
        this.lastSampleTime = lastSampleTime;
    }
}
public class BizwifiAPI { /** * Wi - Fi数据统计 * 查询一定时间范围内的WiFi连接总人数 、 微信方式连Wi - Fi人数 、 商家主页访问人数 、 连网后消息发送人数 、 新增公众号关注人数和累计公众号关注人数 。 * 查询的最长时间跨度为30天 。 * @ param accessToken accessToken * @ param statisticsList statisticsList * @ return StatisticsListResult */ public static StatisticsListResult statisticsList ( String accessToken , StatisticsList statisticsList ) { } }
return statisticsList ( accessToken , JsonUtil . toJSONString ( statisticsList ) ) ;
public class BaseJsonBo { /** * Get a sub - attribute using d - path . * @ param attrName * @ param dPath * @ param clazz * @ return * @ see DPathUtils */ public < T > T getSubAttr ( String attrName , String dPath , Class < T > clazz ) { } }
Lock lock = lockForRead ( ) ; try { return JacksonUtils . getValue ( getAttribute ( attrName ) , dPath , clazz ) ; } finally { lock . unlock ( ) ; }
public class PropertiesBasedGreenMailConfigurationBuilder { /** * Builds a configuration object based on given properties . * @ param properties the properties . * @ return a configuration and never null . */ public GreenMailConfiguration build ( Properties properties ) { } }
GreenMailConfiguration configuration = new GreenMailConfiguration ( ) ; String usersParam = properties . getProperty ( GREENMAIL_USERS ) ; if ( null != usersParam ) { String [ ] usersArray = usersParam . split ( "," ) ; for ( String user : usersArray ) { extractAndAddUser ( configuration , user ) ; } } String disabledAuthentication = properties . getProperty ( GREENMAIL_AUTH_DISABLED ) ; if ( null != disabledAuthentication ) { configuration . withDisabledAuthentication ( ) ; } return configuration ;
public class GetTagValuesRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param getTagValuesRequest request to serialize; must not be null
     * @param protocolMarshaller target protocol marshaller
     */
    public void marshall(GetTagValuesRequest getTagValuesRequest, ProtocolMarshaller protocolMarshaller) {
        if (getTagValuesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request field under its static binding descriptor.
            protocolMarshaller.marshall(getTagValuesRequest.getPaginationToken(), PAGINATIONTOKEN_BINDING);
            protocolMarshaller.marshall(getTagValuesRequest.getKey(), KEY_BINDING);
        } catch (Exception e) {
            // Any marshalling failure is wrapped in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BinaryValue {
    /**
     * Update with <code>length</code> bytes from the specified InputStream
     * <code>stream</code> to this binary value at <code>position</code>.
     *
     * @param stream the data.
     * @param length the number of bytes from buffer to write.
     * @param position position in file to write data.
     */
    public void update(InputStream stream, long length, long position) throws IOException, RepositoryException {
        // First modification: replace the shared internal data with an editable copy
        // ("changed" is presumably set elsewhere once edited - confirm).
        if (!changed) {
            internalData = createEditableCopy(this.getInternalData());
        }
        ((EditableValueData) internalData).update(stream, length, position);
        // Previously handed-out streams no longer reflect the current content.
        invalidateStream();
    }
}
public class JFeatureSpec {
    /**
     * Java wrapper for {@link FeatureSpec#extractWithSettings(String, FeatureBuilder, ClassTag)}.
     *
     * @param settings previously exported feature settings
     * @return extractor producing sparse labeled points
     */
    public JRecordExtractor<T, SparseLabeledPoint> extractWithSettingsSparseLabeledPoint(String settings) {
        // Delegate to the Scala-side helper and wrap the result for Java callers.
        return new JRecordExtractor<>(JavaOps.extractWithSettingsSparseLabeledPoint(self, settings));
    }
}
public class StackInspector { /** * Inspect the given { @ link Class } for all { @ link Facet } types from all { @ link FacetConstraint } declarations . This * method inspects the entire constraint tree . */ public static < FACETTYPE extends Facet < ? > > Set < Class < FACETTYPE > > getAllRelatedFacets ( final Class < ? > inspectedType ) { } }
Set < Class < FACETTYPE > > seen = new LinkedHashSet < Class < FACETTYPE > > ( ) ; return getAllRelatedFacets ( seen , inspectedType ) ;
public class Event {
    /**
     * Setter for causes_event.
     * @generated JCas type-system accessor - do not hand-edit.
     * @param v value to set into the feature
     */
    public void setCauses_event(FSArray v) {
        // Verify the feature exists in the loaded type system before writing.
        if (Event_Type.featOkTst && ((Event_Type) jcasType).casFeat_causes_event == null)
            jcasType.jcas.throwFeatMissing("causes_event", "ch.epfl.bbp.uima.genia.Event");
        // Store the FS reference via the low-level CAS API.
        jcasType.ll_cas.ll_setRefValue(addr, ((Event_Type) jcasType).casFeatCode_causes_event, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class HpelFormatter { /** * Sets the formatter locale and the dateFormat that will be used to localize a log record being formatted . The formatter locale will be used * when { @ link # formatRecord ( RepositoryLogRecord ) } is invoked . It is possible to format a log record with a locale other than * one set by this method using { @ link # formatRecord ( RepositoryLogRecord , Locale ) } . the formatter locale will be set to the system * locale until this method gets invoked . The dateFormat can be either the default format or the ISO - 8601 format . * @ param locale the Locale to be used for localizing the log record , < code > null < / code > to disable message localization . * @ param flag to use ISO - 8601 date format for output . */ public void setDateFormat ( Locale locale , boolean isoDateFormat ) { } }
this . locale = locale ; if ( null == locale ) { dateFormat = FormatSet . customizeDateFormat ( DateFormat . getDateTimeInstance ( DateFormat . SHORT , DateFormat . MEDIUM ) , isoDateFormat ) ; } else { dateFormat = FormatSet . customizeDateFormat ( DateFormat . getDateTimeInstance ( DateFormat . SHORT , DateFormat . MEDIUM , locale ) , isoDateFormat ) ; }
public class StateTransferInterceptor {
    /**
     * For non-tx write commands, we retry the command locally if the topology changed.
     * But we only retry on the originator, and only if the command doesn't have
     * the {@code CACHE_MODE_LOCAL} flag.
     */
    private Object handleNonTxWriteCommand(InvocationContext ctx, WriteCommand command) {
        if (trace)
            log.tracef("handleNonTxWriteCommand for command %s, topology id %d", command, command.getTopologyId());
        // Stamp the command with the current topology before forwarding it down the chain.
        updateTopologyId(command);
        // Only catch OutdatedTopologyExceptions on the originator
        if (!ctx.isOriginLocal()) {
            return invokeNext(ctx, command);
        }
        // Originator: attach the retry handler so topology changes trigger a local retry.
        return invokeNextAndHandle(ctx, command, handleNonTxWriteReturn);
    }
}
public class StandardMultipartFile { /** * Determine whether the multipart content is still available . If a temporary file has been moved , the content is no * longer available . */ protected boolean isAvailable ( ) { } }
// If in memory , it ' s available . if ( fileItem . isInMemory ( ) ) { return true ; } // Check actual existence of temporary file . if ( fileItem instanceof DiskFileItem ) { return ( ( DiskFileItem ) fileItem ) . getStoreLocation ( ) . exists ( ) ; } // Check whether current file size is different than original one . return ( fileItem . getSize ( ) == size ) ;
public class BaseDestinationHandler {
    /**
     * Create a new PtoPMessageItemStream and add it to this Destination's Localisations.
     * In addition to creating and adding it, this function also performs all the
     * necessary updates to make it a recognized part of the Destination.
     *
     * @param localisationIsRemote should be true if the localisation is remote.
     * @param transaction The Transaction to add under. Cannot be null.
     * @param messagingEngineUuid The uuid of the messaging engine that owns the localisation
     * @return PtoPMessageItemStream the new PtoPMessageItemStream added.
     * @throws SIResourceException if the add fails due to a Message Store problem.
     */
    protected LocalizationPoint addNewPtoPLocalization(boolean localisationIsRemote, TransactionCommon transaction, SIBUuid8 messagingEngineUuid, LocalizationDefinition destinationLocalizationDefinition, boolean queuePoint) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "addNewPtoPLocalization", new Object[] { Boolean.valueOf(localisationIsRemote), transaction, messagingEngineUuid, destinationLocalizationDefinition, Boolean.valueOf(queuePoint) });
        // Lazily create and initialise the point-to-point realization on first use.
        if (_ptoPRealization == null) {
            _ptoPRealization = new JSPtoPRealization(this, messageProcessor, _localisationManager);
            _ptoPRealization.initialise();
        }
        // The realization does the actual creation/registration work.
        LocalizationPoint newMsgItemStream = _ptoPRealization.addNewPtoPLocalization(localisationIsRemote, transaction, messagingEngineUuid, destinationLocalizationDefinition, queuePoint);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "addNewPtoPLocalization", newMsgItemStream);
        return newMsgItemStream;
    }
}
public class ClassFileImporter {
    /**
     * Converts the supplied {@link URL URLs} to {@link Location locations} and delegates to
     * {@link #importLocations(Collection)}.
     */
    @PublicAPI(usage = ACCESS)
    public JavaClasses importUrls(Collection<URL> urls) {
        return importLocations(Locations.of(urls));
    }
}
public class Base64 { /** * Low - level access to decoding ASCII characters in the form of a byte array . * < strong > Ignores GUNZIP option , if it ' s set . < / strong > This is not generally * a recommended method , although it is used internally as part of the * decoding process . Special case : if len = 0 , an empty array is returned . * Still , if you need more speed and reduced memory footprint ( and aren ' t * gzipping ) , consider this method . * @ param source * The Base64 encoded data * @ param options * Can specify options such as alphabet type to use * @ return decoded data * @ throws IOException * In case of error * @ since 2.3.1 */ @ Nonnull @ ReturnsMutableCopy public static byte [ ] decode ( @ Nonnull final byte [ ] source , final int options ) throws IOException { } }
return decode ( source , 0 , source . length , options ) ;
public class Configuration { /** * Set the value of the < code > name < / code > property to the given type . This * is equivalent to < code > set ( & lt ; name & gt ; , value . toString ( ) ) < / code > . * @ param name property name * @ param value new value */ public < T extends Enum < T > > void setEnum ( String name , T value ) { } }
set ( name , value . toString ( ) ) ;
public class Period {
    /**
     * Constructs a Period representing a duration of more than
     * count units extending into the past.
     *
     * @param count the number of units. must be non-negative
     * @param unit the unit
     * @return the new Period
     */
    public static Period moreThan(float count, TimeUnit unit) {
        // Rejects invalid (negative) counts up front.
        checkCount(count);
        // MT = "more than"; the boolean flag's meaning is defined by the
        // constructor - presumably direction/inclusiveness, confirm there.
        return new Period(ETimeLimit.MT, false, count, unit);
    }
}
public class MapMarkerBuilder {
    /**
     * Final build method of the builder, called once all parameters are set up for
     * the generic marker.
     *
     * @param markerContext reference to the record set backing the marker, used later
     *        for pop-up panel and hover label construction
     * @param <T> record set type backing the marker
     * @return an instance of generic marker
     */
    public <T> GenericMarker<T> createMarker(T markerContext, GenericMapWidget mapWidget) {
        this.map = mapWidget;
        validateRequiredParameters();
        // Dispatch on the concrete widget implementation.
        if (map instanceof GoogleV3MapWidget) {
            return new GoogleV3Marker<T>(this, markerContext);
        }
        if (map instanceof OfflineMapWidget) {
            return new OfflineMapMarker<T>(this, markerContext);
        }
        // No marker implementation exists for this widget type.
        throw new UnsupportedOperationException();
    }
}
public class Client {
    /**
     * Closes the connection to the dbserver. This instance can no longer be used after this action.
     */
    void close() {
        // Best-effort shutdown: each step is attempted even if earlier ones fail,
        // so every resource gets a chance to be released. Failures are logged only.
        try {
            performTeardownExchange();
        } catch (IOException e) {
            logger.warn("Problem reporting our intention to close the dbserver connection", e);
        }
        try {
            channel.close();
        } catch (IOException e) {
            logger.warn("Problem closing dbserver client output channel", e);
        }
        try {
            os.close();
        } catch (IOException e) {
            logger.warn("Problem closing dbserver client output stream", e);
        }
        try {
            is.close();
        } catch (IOException e) {
            logger.warn("Problem closing dbserver client input stream", e);
        }
        try {
            socket.close();
        } catch (IOException e) {
            logger.warn("Problem closing dbserver client socket", e);
        }
    }
}
public class DatabaseContentReader {
    /**
     * Parse metadata from the sequence, store it into the DocumentMetadata
     * object passed in.
     *
     * @param metadata target object populated from the result sequence
     * @return uri of the document with this metadata
     * @throws IOException on a malformed sequence or permission-parsing failure
     */
    private String parseMetadata(DocumentMetadata metadata) throws IOException {
        // The result sequence is positional: URI, node-kind, collections*,
        // permission elements*, quality, [metadata], [properties], integer marker.
        ResultItem item = result.next();
        String uri = item.asString();
        if (uri == null) {
            throw new IOException("Missing document URI for metadata.");
        }
        item = result.next();
        // node-kind, must exist
        String nKind = item.asString();
        metadata.setFormat(nKind);
        item = result.next();
        // handle collections, may not be present; consumed even when not copied
        while (item != null && item.getItemType() == ValueType.XS_STRING) {
            if (!copyCollection) {
                item = result.next();
                continue;
            }
            metadata.addCollection(item.asString());
            item = result.next();
        }
        // handle permissions, may not be present; serialized into a <perms> wrapper
        StringBuilder buf = new StringBuilder();
        buf.append("<perms>");
        while (item != null && ValueType.ELEMENT == item.getItemType()) {
            if (!copyPermission) {
                item = result.next();
                continue;
            }
            try {
                readPermission((XdmElement) item.getItem(), metadata, buf);
            } catch (Exception e) {
                throw new IOException(e);
            }
            item = result.next();
        }
        buf.append("</perms>");
        metadata.setPermString(buf.toString());
        // handle quality, always present even if not requested (barrier)
        metadata.setQuality((XSInteger) item.getItem());
        // handle metadata (only consumed when it is a JSON item)
        item = result.next();
        if (copyMetadata) {
            XdmItem metaItem = item.getItem();
            if (metaItem instanceof JsonItem) {
                JsonNode node = ((JsonItem) metaItem).asJsonNode();
                metadata.meta = new HashMap<String, String>(node.size());
                for (Iterator<String> names = node.fieldNames(); names.hasNext();) {
                    String key = names.next();
                    JsonNode nodeVal = node.get(key);
                    metadata.meta.put(key, nodeVal.asText());
                }
                item = result.next();
            }
        }
        // handle prop:properties node, optional
        // if not present, there will be a 0 as a marker
        if (copyProperties && ValueType.ELEMENT == item.getItemType()) {
            String pString = item.asString();
            if (pString != null) {
                metadata.setProperties(pString);
            }
            item = result.next();
        }
        // Final sanity check: the sequence must end on the integer marker.
        if (ValueType.XS_INTEGER != item.getItemType()) {
            throw new IOException(uri + " unexpected " + item.getItemType() + " " + item.asString() + ", expected " + ValueType.XS_INTEGER + " 0");
        }
        return uri;
    }
}
public class ForkJoinPool { /** * Pushes a possibly - external submission . */ private < T > ForkJoinTask < T > externalSubmit ( ForkJoinTask < T > task ) { } }
Thread t ; ForkJoinWorkerThread w ; WorkQueue q ; if ( task == null ) throw new NullPointerException ( ) ; if ( ( ( t = Thread . currentThread ( ) ) instanceof ForkJoinWorkerThread ) && ( w = ( ForkJoinWorkerThread ) t ) . pool == this && ( q = w . workQueue ) != null ) q . push ( task ) ; else externalPush ( task ) ; return task ;
public class AbstractResourceAdapterDeployer {
    /**
     * Verify deployment against bean validation.
     *
     * Validates the resource adapter instance, every connection factory and
     * every admin object of the deployment against the configured bean
     * validation groups, collecting all constraint violations before failing.
     *
     * @param deployment The deployment
     * @exception DeployException Thrown in case of a violation, or when a
     *            configured validation group class cannot be loaded
     */
    @SuppressWarnings("unchecked")
    private void verifyBeanValidation(Deployment deployment) throws DeployException {
        // Bean validation is optional; when no provider is configured, skip entirely
        if (beanValidation != null) {
            ValidatorFactory vf = null;
            try {
                vf = beanValidation.getValidatorFactory();
                javax.validation.Validator v = vf.getValidator();
                // Resolve the validation groups from the activation; fall back
                // to the Default group when none were configured
                Collection<String> l = deployment.getActivation().getBeanValidationGroups();
                if (l == null || l.isEmpty())
                    l = Arrays.asList(javax.validation.groups.Default.class.getName());
                // Load each group class through the deployment's own classloader
                Collection<Class<?>> groups = new ArrayList<>();
                for (String clz : l) {
                    try {
                        groups.add(Class.forName(clz, true, deployment.getClassLoader()));
                    } catch (ClassNotFoundException e) {
                        throw new DeployException(bundle.unableToLoadBeanValidationGroup(clz, deployment.getIdentifier()), e);
                    }
                }
                // Accumulate violations from all validated objects so a single
                // exception can report everything at once (raw Set kept as-is,
                // matching the legacy raw-typed API used below)
                Set failures = new HashSet();
                if (deployment.getResourceAdapter() != null) {
                    Set f = v.validate(deployment.getResourceAdapter().getResourceAdapter(), groups.toArray(new Class<?>[groups.size()]));
                    if (!f.isEmpty())
                        failures.addAll(f);
                }
                if (deployment.getConnectionFactories() != null) {
                    for (org.ironjacamar.core.api.deploymentrepository.ConnectionFactory cf : deployment.getConnectionFactories()) {
                        Set f = v.validate(cf.getConnectionFactory(), groups.toArray(new Class<?>[groups.size()]));
                        if (!f.isEmpty())
                            failures.addAll(f);
                    }
                }
                if (deployment.getAdminObjects() != null) {
                    for (org.ironjacamar.core.api.deploymentrepository.AdminObject ao : deployment.getAdminObjects()) {
                        Set f = v.validate(ao.getAdminObject(), groups.toArray(new Class<?>[groups.size()]));
                        if (!f.isEmpty())
                            failures.addAll(f);
                    }
                }
                if (!failures.isEmpty()) {
                    throw new DeployException(bundle.violationOfValidationRule(deployment.getIdentifier()), new ConstraintViolationException(failures));
                }
            } finally {
                // Always release the validator factory's resources
                if (vf != null)
                    vf.close();
            }
        }
    }
}
public class Cache { /** * BLPOP 是列表的阻塞式 ( blocking ) 弹出原语 。 * 它是 LPOP 命令的阻塞版本 , 当给定列表内没有任何元素可供弹出的时候 , 连接将被 BLPOP 命令阻塞 , 直到等待超时或发现可弹出元素为止 。 * 当给定多个 key 参数时 , 按参数 key 的先后顺序依次检查各个列表 , 弹出第一个非空列表的头元素 。 * 参考 : http : / / redisdoc . com / list / blpop . html * 命令行 : BLPOP key [ key . . . ] timeout */ @ SuppressWarnings ( "rawtypes" ) public List blpop ( int timeout , Object ... keys ) { } }
Jedis jedis = getJedis ( ) ; try { List < byte [ ] > data = jedis . blpop ( timeout , keysToBytesArray ( keys ) ) ; return keyValueListFromBytesList ( data ) ; } finally { close ( jedis ) ; }
public class IosCertificateFactory { /** * Test whether array is a Base64 - encoded certificate . If so , return * the decoded content instead of the specified array . * @ see CertificateFactorySpi # engineGenerateCertificate ( InputStream ) */ private byte [ ] maybeDecodeBase64 ( byte [ ] byteArray ) { } }
try { String pem = new String ( byteArray ) ; // Remove required begin / end lines . pem = pem . substring ( BEGIN_CERT_LINE_LENGTH , pem . length ( ) - END_CERT_LINE_LENGTH ) ; return Base64 . getDecoder ( ) . decode ( pem ) ; } catch ( Exception e ) { // Not a valid PEM encoded certificate , return original array . return byteArray ; }
public class SQLiteDatabase { /** * Registers a CustomFunction callback as a function that can be called from * SQLite database triggers . * @ param name the name of the sqlite3 function * @ param numArgs the number of arguments for the function * @ param function callback to call when the function is executed * @ hide */ public void addCustomFunction ( String name , int numArgs , CustomFunction function ) { } }
// Create wrapper ( also validates arguments ) . SQLiteCustomFunction wrapper = new SQLiteCustomFunction ( name , numArgs , function ) ; synchronized ( mLock ) { throwIfNotOpenLocked ( ) ; mConfigurationLocked . customFunctions . add ( wrapper ) ; try { mConnectionPoolLocked . reconfigure ( mConfigurationLocked ) ; } catch ( RuntimeException ex ) { mConfigurationLocked . customFunctions . remove ( wrapper ) ; throw ex ; } }
public class NameUtil { /** * Translate the input ejbname to be used by ejbdeploy generated file name * according to the following rules . * 1 . Leading and trailing white spaces are trimmed . * 2 . All non - alphanumeric characters are replaced by " _ " to avoid invalid * file name character since < ejb - name > can be in any form . * 3 . Limit the return string to a pre - defined length Max _ EjbName _ Size . */ private static String translateEjbName ( String ejbName ) { } }
// trim leading and trailing blanks ejbName = ejbName . trim ( ) ; // limits to the first Max _ EjbName _ Size characters int len = ejbName . length ( ) ; if ( len > Max_EjbName_Size ) { len = Max_EjbName_Size ; } // translate non - alphanumeric characters to " _ " char translated [ ] = new char [ len ] ; for ( int i = 0 ; i < len ; ++ i ) { char curChar = ejbName . charAt ( i ) ; translated [ i ] = Character . isLetterOrDigit ( curChar ) ? curChar : '_' ; } return new String ( translated ) ;
public class Choice6 {
    /**
     * {@inheritDoc}
     */
    @Override
    public <G> Choice6<A, B, C, D, E, G> pure(G g) {
        // Inject the value into the last (sixth, type G) slot of the coproduct;
        // f(...) is presumably the sixth-slot static constructor — confirm.
        return f(g);
    }
}
public class SpiderService {
    /**
     * Verify that all ColumnFamilies needed for the given application exist:
     * creates the objects/terms stores for every current table and deletes
     * the stores of tables that were removed since the old definition.
     *
     * @param oldAppDef the previous application definition, or null on first deploy
     * @param appDef    the current application definition
     */
    private void verifyApplicationCFs(ApplicationDefinition oldAppDef, ApplicationDefinition appDef) {
        Tenant tenant = Tenant.getTenant(appDef);
        DBService dbService = DBService.instance(tenant);

        // Ensure both table-level stores exist for every current table.
        for (TableDefinition tableDef : appDef.getTableDefinitions().values()) {
            dbService.createStoreIfAbsent(objectsStoreName(tableDef), true);
            dbService.createStoreIfAbsent(termsStoreName(tableDef), true);
        }

        // Remove stores belonging to tables that no longer exist.
        if (oldAppDef != null) {
            for (TableDefinition oldTableDef : oldAppDef.getTableDefinitions().values()) {
                final boolean stillPresent = appDef.getTableDef(oldTableDef.getTableName()) != null;
                if (!stillPresent) {
                    dbService.deleteStoreIfPresent(objectsStoreName(oldTableDef));
                    dbService.deleteStoreIfPresent(termsStoreName(oldTableDef));
                }
            }
        }
    }
}
public class PathUtils {
    /**
     * Answer the path with the last file name removed, using forward
     * slash ('/') as the path separator character.
     *
     * Rules:
     * - null for a path which contains no slashes, or which is a single "/";
     * - "/" for a path that starts with a slash and contains no other slash;
     * - otherwise the prefix up to (not including) the last slash, which also
     *   means a trailing slash is simply dropped.
     *
     * Examples:
     * "/grandParent/parent/child" -> "/grandParent/parent"
     * "/grandParent/parent/"      -> "/grandParent/parent"
     * "/parent"                   -> "/"
     * "/"                         -> null
     * "child"                     -> null
     * ""                          -> null
     *
     * @param path the path; a NullPointerException is thrown if it is null
     * @return the parent path, or null as described above
     */
    public static String getParent(String path) {
        final int lastSlash = path.lastIndexOf('/');
        if (lastSlash < 0) {
            return null; // no separator at all
        }
        if (path.length() == 1) {
            return null; // path is exactly "/"
        }
        if (lastSlash == 0) {
            return "/"; // e.g. "/parent" -> root
        }
        return path.substring(0, lastSlash);
    }
}
public class NotifierBase { /** * Call a service method by using a task worker . * The same service will be retrieved each time this method is called . * This method is called from the JIT ( JRebirth Internal Thread ) < br / > * @ param wave the wave that contains all informations */ @ SuppressWarnings ( "unchecked" ) private void returnData ( final Wave wave ) { } }
// Use only the Service class to retrieve the same instance each time final Service service = globalFacade ( ) . serviceFacade ( ) . retrieve ( ( Class < Service > ) wave . componentClass ( ) ) ; if ( service == null ) { LOGGER . error ( SERVICE_NOT_FOUND_ERROR , wave . toString ( ) ) ; // When developer mode is activated an error will be thrown by logger // Otherwise the wave will be managed by UnprocessedWaveHandler this . unprocessedWaveHandler . manageUnprocessedWave ( SERVICE_NOT_FOUND_MESSAGE . getText ( ) , wave ) ; } else { // The inner task will be run into the JRebirth Thread Pool final ServiceTaskBase < ? > task = ( ServiceTaskBase < ? > ) service . returnData ( wave ) ; if ( task != null && CoreParameters . FOLLOW_UP_SERVICE_TASKS . get ( ) ) { globalFacade ( ) . serviceFacade ( ) . retrieve ( TaskTrackerService . class ) . trackTask ( task ) ; } }
public class DRL6Expressions {
    /**
     * ANTLR-generated parser rule (do not hand-edit the control flow) for:
     * src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:209:1 :
     * annotationElementValuePair[AnnotationDescr descr, AnnotatedDescrBuilder inDescrBuilder]
     *   : key=ID EQUALS_ASSIGN val=annotationValue[inDescrBuilder] ;
     */
    public final void annotationElementValuePair(AnnotationDescr descr, AnnotatedDescrBuilder inDescrBuilder) throws RecognitionException {
        Token key = null;
        Object val = null;
        try {
            // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:210:3 : ( key= ID EQUALS_ASSIGN val= annotationValue[inDescrBuilder] )
            // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:210:5 : key= ID EQUALS_ASSIGN val= annotationValue[inDescrBuilder]
            {
                // Match the annotation element name (the "key")
                key = (Token) match(input, ID, FOLLOW_ID_in_annotationElementValuePair1041);
                if (state.failed) return;
                // Match the '=' between key and value
                match(input, EQUALS_ASSIGN, FOLLOW_EQUALS_ASSIGN_in_annotationElementValuePair1043);
                if (state.failed) return;
                // Descend into the annotationValue sub-rule
                pushFollow(FOLLOW_annotationValue_in_annotationElementValuePair1047);
                val = annotationValue(inDescrBuilder);
                state._fsp--;
                if (state.failed) return;
                // Only mutate the descriptor when not speculatively backtracking
                if (state.backtracking == 0) {
                    if (buildDescr) {
                        descr.setKeyValue((key != null ? key.getText() : null), val);
                    }
                }
            }
        } catch (RecognitionException re) {
            throw re;
        } finally {
            // do for sure before leaving
        }
    }
}
public class PolyLabel { /** * Signed distance from point to polygon outline ( negative if point is outside ) */ private static float pointToPolygonDist ( double x , double y , Polygon polygon ) { } }
boolean inside = false ; double minDistSq = Double . POSITIVE_INFINITY ; // External ring LineString exterior = polygon . getExteriorRing ( ) ; for ( int i = 0 , n = exterior . getNumPoints ( ) - 1 , j = n - 1 ; i < n ; j = i , i ++ ) { Coordinate a = exterior . getCoordinateN ( i ) ; Coordinate b = exterior . getCoordinateN ( j ) ; if ( ( ( a . y > y ) ^ ( b . y > y ) ) && ( x < ( b . x - a . x ) * ( y - a . y ) / ( b . y - a . y ) + a . x ) ) inside = ! inside ; double seqDistSq = getSegDistSq ( x , y , a , b ) ; minDistSq = Math . min ( minDistSq , seqDistSq ) ; } // Internal rings for ( int k = 0 ; k < polygon . getNumInteriorRing ( ) ; k ++ ) { LineString interior = polygon . getInteriorRingN ( k ) ; for ( int i = 0 , n = interior . getNumPoints ( ) - 1 , j = n - 1 ; i < n ; j = i , i ++ ) { Coordinate a = interior . getCoordinateN ( i ) ; Coordinate b = interior . getCoordinateN ( j ) ; if ( ( ( a . y > y ) ^ ( b . y > y ) ) && ( x < ( b . x - a . x ) * ( y - a . y ) / ( b . y - a . y ) + a . x ) ) inside = ! inside ; minDistSq = Math . min ( minDistSq , getSegDistSq ( x , y , a , b ) ) ; } } return ( float ) ( ( inside ? 1 : - 1 ) * Math . sqrt ( minDistSq ) ) ;
public class InternalServerErrorException { /** * Details about the error . * @ param errorDetails * Details about the error . */ @ com . fasterxml . jackson . annotation . JsonProperty ( "ErrorDetails" ) public void setErrorDetails ( java . util . Collection < ErrorDetail > errorDetails ) { } }
if ( errorDetails == null ) { this . errorDetails = null ; return ; } this . errorDetails = new java . util . ArrayList < ErrorDetail > ( errorDetails ) ;
public class WidgetUtil { /** * Gets the pre - made HTML Widget for the specified guild using the specified * settings . The widget will only display correctly if the guild in question * has the Widget enabled . Additionally , this method can be used independently * of being on the guild in question . * @ param guildId * the guild ID * @ param theme * the theme , light or dark * @ param width * the width of the widget * @ param height * the height of the widget * @ return a String containing the pre - made widget with the supplied settings */ public static String getPremadeWidgetHtml ( String guildId , WidgetTheme theme , int width , int height ) { } }
Checks . notNull ( guildId , "GuildId" ) ; Checks . notNull ( theme , "WidgetTheme" ) ; Checks . notNegative ( width , "Width" ) ; Checks . notNegative ( height , "Height" ) ; return String . format ( WIDGET_HTML , guildId , theme . name ( ) . toLowerCase ( ) , width , height ) ;
public class IdStrategy {
    /**
     * Generates a schema from the given class. If this strategy is part of a group,
     * the existing fields of that group's schema will be re-used.
     */
    protected <T> Schema<T> newSchema(Class<T> typeClass) {
        // check if this is part of a group
        if (primaryGroup != null) {
            // only pojos created by runtime schema support groups
            final Schema<T> s = primaryGroup.getSchemaWrapper(typeClass, true).getSchema();
            if (s instanceof RuntimeSchema) {
                final RuntimeSchema<T> rs = (RuntimeSchema<T>) s;
                // Collect only the fields visible to this strategy's groupId
                final ArrayList<Field<T>> fields = new ArrayList<MappedSchema.Field<T>>(rs.fields.length);
                for (Field<T> f : rs.fields) {
                    final int groupFilter = f.groupFilter;
                    if (groupFilter != 0) {
                        final int set; // set for exclusion
                        if (groupFilter > 0) {
                            // inclusion: every bit outside the filter is an exclusion bit
                            set = ~groupFilter & 0x7FFFFFFF;
                        } else {
                            // exclusion: the (negated) filter bits are the exclusion set
                            set = -groupFilter;
                        }
                        if (0 != (groupId & set)) {
                            // this field is excluded on the current group id
                            continue;
                        }
                    }
                    fields.add(f);
                }
                final int size = fields.size();
                if (size == rs.fields.length) {
                    // nothing is excluded; reuse the group's schema unchanged
                    return rs;
                }
                if (size == 0) {
                    throw new RuntimeException("All fields were excluded for " + rs.messageFullName() + " on group " + groupId);
                }
                // Build a reduced schema containing only the visible fields
                return new RuntimeSchema<T>(typeClass, fields,
                        // the last field
                        fields.get(size - 1).number, rs.instantiator);
            }
            return s;
        }
        return RuntimeSchema.createFrom(typeClass, this);
    }
}
public class ANNISUserConfigurationManager { /** * Writes the user to the disk * @ param user * @ return True if successful . */ public boolean writeUser ( User user ) { } }
// save user info to file if ( resourcePath != null ) { lock . writeLock ( ) . lock ( ) ; try { File userDir = new File ( resourcePath , "users" ) ; if ( userDir . isDirectory ( ) ) { // get the file which corresponds to the user File userFile = new File ( userDir . getAbsolutePath ( ) , user . getName ( ) ) ; Properties props = user . toProperties ( ) ; try ( FileOutputStream out = new FileOutputStream ( userFile ) ) { props . store ( out , "" ) ; return true ; } catch ( IOException ex ) { log . error ( "Could not write users file" , ex ) ; } } } finally { lock . writeLock ( ) . unlock ( ) ; } } // end if resourcePath not null return false ;
public class CmsSynchronizeSettingsDialog { /** * Commits the edited synchronize settings to the user settings . < p > */ @ Override public void actionCommit ( ) { } }
List < Throwable > errors = new ArrayList < Throwable > ( ) ; try { // set the synchronize settings CmsUserSettings userSettings = new CmsUserSettings ( getCms ( ) ) ; m_synchronizeSettings . checkValues ( getCms ( ) ) ; userSettings . setSynchronizeSettings ( m_synchronizeSettings ) ; userSettings . save ( getCms ( ) ) ; setDialogObject ( null ) ; } catch ( Throwable t ) { errors . add ( t ) ; } // set the list of errors to display when saving failed setCommitErrors ( errors ) ;
public class TopLevelGedDocumentMongoToGedObjectVisitor { /** * { @ inheritDoc } */ @ Override public final void visit ( final FamilyDocumentMongo document ) { } }
gedObject = new Family ( parent , new ObjectId ( document . getString ( ) ) ) ;
public class ServiceContext { /** * Returns the service request . */ public static ServletResponse getContextResponse ( ) { } }
ServiceContext context = ( ServiceContext ) _localContext . get ( ) ; if ( context != null ) return context . _response ; else return null ;
public class MemorySegment { /** * Equals two memory segment regions . * @ param seg2 Segment to equal this segment with * @ param offset1 Offset of this segment to start equaling * @ param offset2 Offset of seg2 to start equaling * @ param length Length of the equaled memory region * @ return true if equal , false otherwise */ public final boolean equalTo ( MemorySegment seg2 , int offset1 , int offset2 , int length ) { } }
int i = 0 ; // we assume unaligned accesses are supported . // Compare 8 bytes at a time . while ( i <= length - 8 ) { if ( getLong ( offset1 + i ) != seg2 . getLong ( offset2 + i ) ) { return false ; } i += 8 ; } // cover the last ( length % 8 ) elements . while ( i < length ) { if ( get ( offset1 + i ) != seg2 . get ( offset2 + i ) ) { return false ; } i += 1 ; } return true ;
public class Tracer { /** * Return connection listener * @ param poolName The name of the pool * @ param mcp The managed connection pool * @ param cl The connection listener * @ param kill Kill the listener * @ param interleaving Interleaving flag * @ param callstack The call stack */ public static synchronized void returnConnectionListener ( String poolName , Object mcp , Object cl , boolean kill , boolean interleaving , Throwable callstack ) { } }
if ( ! interleaving ) { if ( ! kill ) { log . tracef ( "%s" , new TraceEvent ( poolName , Integer . toHexString ( System . identityHashCode ( mcp ) ) , TraceEvent . RETURN_CONNECTION_LISTENER , Integer . toHexString ( System . identityHashCode ( cl ) ) , ! confidential && callstack != null ? toString ( callstack ) : "" ) ) ; } else { log . tracef ( "%s" , new TraceEvent ( poolName , Integer . toHexString ( System . identityHashCode ( mcp ) ) , TraceEvent . RETURN_CONNECTION_LISTENER_WITH_KILL , Integer . toHexString ( System . identityHashCode ( cl ) ) , ! confidential && callstack != null ? toString ( callstack ) : "" ) ) ; } } else { if ( ! kill ) { log . tracef ( "%s" , new TraceEvent ( poolName , Integer . toHexString ( System . identityHashCode ( mcp ) ) , TraceEvent . RETURN_INTERLEAVING_CONNECTION_LISTENER , Integer . toHexString ( System . identityHashCode ( cl ) ) , ! confidential && callstack != null ? toString ( callstack ) : "" ) ) ; } else { log . tracef ( "%s" , new TraceEvent ( poolName , Integer . toHexString ( System . identityHashCode ( mcp ) ) , TraceEvent . RETURN_INTERLEAVING_CONNECTION_LISTENER_WITH_KILL , Integer . toHexString ( System . identityHashCode ( cl ) ) , ! confidential && callstack != null ? toString ( callstack ) : "" ) ) ; } }
public class LaJobRunner {
    /**
     * Logs a job exception, routing business-stop causes to the notice log
     * (plus the stopped hook) and everything else to the error log (plus the
     * error hook).
     *
     * @param runtime the runtime of the failed job
     * @param bigMsg  the full message, already containing the stack trace
     * @param cause   the thrown exception
     */
    protected void logJobException(LaJobRuntime runtime, String bigMsg, Throwable cause) {
        // bigMsg contains stack-trace
        if (isBusinessStoppedException(cause)) {
            JobNoticeLog.log(runtime.getNoticeLogLevel(), () -> bigMsg);
            if (noticeLogHook != null) {
                noticeLogHook.hookStopped(runtime, bigMsg, OptionalThing.of(cause));
            }
            return;
        }
        if (errorLogHook != null) {
            final OptionalThing<LaScheduledJob> job = jobManager.findJobByKey(runtime.getJobKey());
            errorLogHook.hookError(new JobErrorResource(job, OptionalThing.of(runtime), bigMsg, cause));
        }
        JobErrorLog.log(bigMsg); // not use second argument here, same reason as logging filter
    }
}
public class ScoreBuildHistogram {
    /**
     * Re-scores each row against the current tree decisions and records the
     * node the row lands in, (giving it an improved prediction).
     *
     * nnids[row] receives the row's node id relative to _leaf; nids is also
     * updated in place with the (possibly OOB-encoded) new node id.
     * NOTE(review): wrks and tree are unused in this body — presumably kept
     * for a shared signature; confirm against the callers.
     */
    private void score_decide(Chunk chks[], Chunk nids, Chunk wrks, Chunk tree, int nnids[]) {
        for (int row = 0; row < nids._len; row++) { // Over all rows
            int nid = (int) nids.at80(row); // Get Node to decide from
            if (isDecidedRow(nid)) { // already done
                nnids[row] = (nid - _leaf);
                continue;
            }
            // Score row against current decisions & assign new split
            boolean oob = isOOBRow(nid);
            if (oob) nid = oob2Nid(nid); // sampled away - we track the position in the tree
            DTree.DecidedNode dn = _tree.decided(nid);
            if (dn._split._col == -1 && DTree.isRootNode(dn)) {
                // Un-split root: the row cannot move, keep it where it is
                nnids[row] = (nid - _leaf);
                continue;
            }
            if (dn._split._col == -1) { // Might have a leftover non-split
                nid = dn._pid; // Use the parent split decision then
                int xnid = oob ? nid2Oob(nid) : nid;
                nids.set0(row, xnid);
                nnids[row] = xnid - _leaf;
                dn = _tree.decided(nid); // Parent steers us
            }
            assert !isDecidedRow(nid);
            nid = dn.ns(chks, row); // Move down the tree 1 level
            if (!isDecidedRow(nid)) {
                // Landed on an undecided node: persist the (OOB-encoded) id
                int xnid = oob ? nid2Oob(nid) : nid;
                nids.set0(row, xnid);
                nnids[row] = xnid - _leaf;
            } else {
                nnids[row] = nid - _leaf;
            }
        }
    }
}