signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ConfigurationMetadataRepositoryJsonBuilder { /** * Create a new builder instance using { @ link StandardCharsets # UTF _ 8 } as the default * charset and the specified json resource . * @ param inputStreams the source input streams * @ return a new { @ link ConfigurationMetadataRepositoryJsonBuilder } instance . * @ throws IOException on error */ public static ConfigurationMetadataRepositoryJsonBuilder create ( InputStream ... inputStreams ) throws IOException { } }
ConfigurationMetadataRepositoryJsonBuilder builder = create ( ) ; for ( InputStream inputStream : inputStreams ) { builder = builder . withJsonResource ( inputStream ) ; } return builder ;
public class RtfDestinationShppict {

    /**
     * Handles the close of an RTF group for this picture destination: flushes any
     * buffered text, finalizes a pending image, and (in import mode) emits the
     * closing brace back to the output.
     *
     * @return always {@code true} (the group end is considered handled)
     * @see com.lowagie.text.rtf.direct.RtfDestination#handleGroupEnd()
     */
    public boolean handleCloseGroup() {
        this.onCloseGroup(); // event handler
        if (this.rtfParser.isImport()) {
            // Import mode: write out any pending text before closing the group.
            if (this.buffer.length() > 0) {
                writeBuffer();
            }
            // A non-null dataOS means picture bytes were collected; turn them into an image.
            if (dataOS != null) {
                addImage();
                dataOS = null;
            }
            this.writeText("}");
            return true;
        }
        if (this.rtfParser.isConvert()) {
            // Convert mode: only the pending image needs finalizing; no brace is echoed.
            if (dataOS != null) {
                addImage();
                dataOS = null;
            }
        }
        return true;
    }
}
public class SipResourceAdaptor {

    /**
     * Deactivates this resource adaptor entity: detaches the SIP listener, tears
     * down every listening point (retrying while the stack still reports it in
     * use), deactivates the provider wrapper, and stops the SIP stack.
     *
     * @see javax.slee.resource.ResourceAdaptor#raInactive()
     */
    public synchronized void raInactive() {
        this.provider.removeSipListener(this);
        ListeningPoint[] listeningPoints = this.provider.getListeningPoints();
        for (int i = 0; i < listeningPoints.length; i++) {
            ListeningPoint lp = listeningPoints[i];
            // Retry up to 10 times with a 100 ms pause: deletion fails with
            // ObjectInUseException while transactions are still draining.
            for (int k = 0; k < 10; k++) {
                try {
                    this.sipStack.deleteListeningPoint(lp);
                    // NOTE(review): the provider is deleted inside the listening-point
                    // loop, so with multiple listening points this is attempted more
                    // than once — confirm the stack tolerates repeated deletion.
                    this.sipStack.deleteSipProvider(this.provider);
                    break;
                } catch (ObjectInUseException ex) {
                    tracer.severe("Object in use -- retrying to delete listening point", ex);
                    try {
                        Thread.sleep(100);
                    } catch (Exception e) {
                        // best-effort backoff; interruption is ignored on purpose
                    }
                }
            }
        }
        this.providerWrapper.raInactive();
        this.sipStack.stop();
        if (tracer.isFineEnabled()) {
            tracer.fine("Sip Resource Adaptor entity inactive.");
        }
    }
}
public class LSrtSupplierBuilder {

    /**
     * Builds the functional interface implementation and, if previously provided,
     * calls the consumer with the built instance.
     *
     * <p>The returned supplier evaluates each registered case predicate in
     * registration order and delegates to the first matching case's function;
     * when none match it falls back to the "eventually" supplier.
     *
     * @return the composed {@link LSrtSupplier}; never null
     */
    @Nonnull
    public final LSrtSupplier build() {
        // Snapshot mutable builder state into finals so the lambda captures
        // a stable view even if the builder is mutated afterwards.
        final LSrtSupplier eventuallyFinal = this.eventually;
        LSrtSupplier retval;
        final Case<LBoolSupplier, LSrtSupplier>[] casesArray = cases.toArray(new Case[cases.size()]);
        retval = LSrtSupplier.srtSup(() -> {
            try {
                for (Case<LBoolSupplier, LSrtSupplier> aCase : casesArray) {
                    if (aCase.casePredicate().getAsBool()) {
                        return aCase.caseFunction().getAsSrt();
                    }
                }
                return eventuallyFinal.getAsSrt();
            } catch (Error e) { // NOSONAR
                // Errors are always rethrown untouched.
                throw e;
            } catch (Throwable e) { // NOSONAR
                // Everything else goes through the configured handling strategy.
                throw Handler.handleOrPropagate(e, handling);
            }
        });
        if (consumer != null) {
            consumer.accept(retval);
        }
        return retval;
    }
}
public class DatalogDependencyGraphGenerator {

    /**
     * Updates the {@link #predicateDependencyGraph} by the input rule: adds an
     * edge &lt;rule.head.pred, p&gt; for every predicate p occurring in a data
     * atom of the rule body (algebra/operation atoms are traversed recursively
     * for nested data atoms; datatype atoms contribute nothing).
     *
     * @param rule the rule whose head-to-body dependencies are recorded
     * @throws IllegalStateException if a body atom is of an unrecognized kind
     */
    private void updatePredicateDependencyGraph(CQIE rule) {
        List<Predicate> dependencyList = new LinkedList<>();
        for (Function bodyAtom : rule.getBody()) {
            if (bodyAtom.isDataFunction()) {
                // Plain data atom: its predicate is a direct dependency.
                dependencyList.add(bodyAtom.getFunctionSymbol());
            } else if (bodyAtom.isAlgebraFunction() || bodyAtom.isOperation()) {
                // Algebra/operation atoms may wrap data atoms; collect those recursively.
                updatePredicateDependencyGraph_traverseBodyAtom(dependencyList, bodyAtom);
                // } else if (bodyAtom.isArithmeticFunction() || bodyAtom.isDataTypeFunction() {
            } else if (bodyAtom.isDataTypeFunction()) {
                // Datatype atoms carry no predicate dependency.
                continue;
            } else {
                throw new IllegalStateException("Unknown Function");
            }
        }
        Predicate headPred = rule.getHead().getFunctionSymbol();
        // Vertices must exist before edges can be added.
        predicateDependencyGraph.addVertex(headPred);
        for (Predicate dependentPred : dependencyList) {
            predicateDependencyGraph.addVertex(dependentPred);
            predicateDependencyGraph.addEdge(headPred, dependentPred);
        }
    }
}
public class StringUtils { /** * Generates a random string of a given length * @ param length The length of the string * @ param min The min character in the string * @ param max The max character in the string * @ param validChar CharPredicate that must match for a character to be returned in the string * @ return A string of random characters */ public static String randomString ( int length , int min , int max , @ NonNull CharMatcher validChar ) { } }
if ( length <= 0 ) { return EMPTY ; } Random random = new Random ( ) ; int maxRandom = max - min ; char [ ] array = new char [ length ] ; for ( int i = 0 ; i < array . length ; i ++ ) { char c ; do { c = ( char ) ( random . nextInt ( maxRandom ) + min ) ; } while ( Character . isLowSurrogate ( c ) || Character . isHighSurrogate ( c ) || ! validChar . matches ( c ) ) ; array [ i ] = c ; } return new String ( array ) ;
public class BinTrie { /** * 保存到二进制输出流 * @ param out * @ return */ public boolean save ( DataOutputStream out ) { } }
try { for ( BaseNode node : child ) { if ( node == null ) { out . writeInt ( 0 ) ; } else { out . writeInt ( 1 ) ; node . walkToSave ( out ) ; } } } catch ( Exception e ) { logger . warning ( "保存到" + out + "失败" + TextUtility . exceptionToString ( e ) ) ; return false ; } return true ;
public class DirectoryOperation { /** * Determine whether the operation contains a file . * @ param f * @ return boolean */ public boolean containsFile ( File f ) { } }
return unchangedFiles . contains ( f ) || newFiles . contains ( f ) || updatedFiles . contains ( f ) || deletedFiles . contains ( f ) || recursedDirectories . contains ( f ) || failedTransfers . containsKey ( f ) ;
public class DBManagerService {

    /**
     * Creates, initializes and starts the {@code DBService} implementation named
     * by the {@code DBService.dbservice} module parameter, instantiated via its
     * {@code DBService(Tenant)} constructor for the default tenant. If startup
     * throws a {@code DBNotAvailableException}, keeps retrying after a wait;
     * any other failure is rethrown as a {@code RuntimeException}.
     *
     * @return the started service instance; never null
     */
    private DBService createDefaultDBService() {
        m_logger.info("Creating DBService for default tenant");
        String dbServiceName = ServerParams.instance().getModuleParamString("DBService", "dbservice");
        if (Utils.isEmpty(dbServiceName)) {
            throw new RuntimeException("'DBService.dbservice' parameter is not defined.");
        }
        DBService dbservice = null;
        Tenant defaultTenant = TenantService.instance().getDefaultTenant();
        boolean bDBOpened = false;
        while (!bDBOpened) {
            try {
                // Find and call the constructor DBService(Tenant).
                @SuppressWarnings("unchecked")
                Class<DBService> serviceClass = (Class<DBService>) Class.forName(dbServiceName);
                Constructor<DBService> constructor = serviceClass.getConstructor(Tenant.class);
                dbservice = constructor.newInstance(defaultTenant);
                dbservice.initialize();
                dbservice.start();
                bDBOpened = true;
            } catch (IllegalArgumentException e) {
                throw new RuntimeException("Cannot load specified 'dbservice': " + dbServiceName, e);
            } catch (ClassNotFoundException e) {
                throw new RuntimeException("Could not load dbservice class '" + dbServiceName + "'", e);
            } catch (NoSuchMethodException e) {
                throw new RuntimeException("Required constructor missing for dbservice class: " + dbServiceName, e);
            } catch (SecurityException | InstantiationException | IllegalAccessException e) {
                throw new RuntimeException("Could not invoke constructor for dbservice class: " + dbServiceName, e);
            } catch (InvocationTargetException e) {
                // This is thrown when a constructor is invoked via reflection.
                // Only a wrapped DBNotAvailableException is retryable.
                if (!(e.getTargetException() instanceof DBNotAvailableException)) {
                    throw new RuntimeException("Could not invoke constructor for dbservice class: " + dbServiceName, e);
                }
            } catch (DBNotAvailableException e) {
                // Fall through to retry.
            } catch (Throwable e) {
                throw new RuntimeException("Failed to initialize default DBService: " + dbServiceName, e);
            }
            if (!bDBOpened) {
                // DB not reachable yet: wait before the next attempt.
                m_logger.info("Database is not reachable. Waiting to retry");
                try {
                    Thread.sleep(db_connect_retry_wait_millis);
                } catch (InterruptedException ex2) {
                    // ignore
                }
            }
        }
        return dbservice;
    }
}
public class BeamSearch {

    /**
     * Returns the best sequences of outcomes based on the model for this object,
     * using beam search of width {@code size} over the input positions.
     *
     * @param numSequences      the maximum number of sequences to be returned
     * @param sequence          the input sequence
     * @param additionalContext an Object[] of additional context, passed to the
     *                          context generator blindly with the assumption
     *                          that the context is appropriate
     * @param minSequenceScore  a lower bound on the score of a returned sequence
     * @return an array of the top ranked sequences of outcomes
     */
    public Sequence[] bestSequences(int numSequences, Object[] sequence, Object[] additionalContext, double minSequenceScore) {
        int n = sequence.length;
        // Two heaps: 'prev' holds candidates for the current position, 'next'
        // accumulates their extensions; they are swapped each step.
        Heap prev = new ListHeap(size);
        Heap next = new ListHeap(size);
        Heap tmp;
        prev.add(new Sequence());
        if (additionalContext == null) {
            additionalContext = EMPTY_ADDITIONAL_CONTEXT;
        }
        for (int i = 0; i < n; i++) {
            int sz = Math.min(size, prev.size());
            int sc = 0;
            for (; prev.size() > 0 && sc < sz; sc++) {
                Sequence top = (Sequence) prev.extract();
                List tmpOutcomes = top.getOutcomes();
                String[] outcomes = (String[]) tmpOutcomes.toArray(new String[tmpOutcomes.size()]);
                String[] contexts = cg.getContext(i, sequence, outcomes, additionalContext);
                double[] scores;
                // Optional cache of model evaluations keyed by the context features.
                if (contextsCache != null) {
                    scores = (double[]) contextsCache.get(contexts);
                    if (scores == null) {
                        scores = model.eval(contexts, probs);
                        contextsCache.put(contexts, scores);
                    }
                } else {
                    scores = model.eval(contexts, probs);
                }
                // Find the score threshold of the top 'size' outcomes without
                // disturbing 'scores' (sort a copy).
                double[] temp_scores = new double[scores.length];
                for (int c = 0; c < scores.length; c++) {
                    temp_scores[c] = scores[c];
                }
                Arrays.sort(temp_scores);
                double min = temp_scores[Math.max(0, scores.length - size)];
                for (int p = 0; p < scores.length; p++) {
                    if (scores[p] < min) continue; // only advance first "size" outcomes
                    String out = model.getOutcome(p);
                    if (validSequence(i, sequence, outcomes, out)) {
                        Sequence ns = new Sequence(top, out, scores[p]);
                        if (ns.getScore() > minSequenceScore) {
                            next.add(ns);
                        }
                    }
                }
                if (next.size() == 0) { // if no advanced sequences, advance all valid
                    for (int p = 0; p < scores.length; p++) {
                        String out = model.getOutcome(p);
                        if (validSequence(i, sequence, outcomes, out)) {
                            Sequence ns = new Sequence(top, out, scores[p]);
                            if (ns.getScore() > minSequenceScore) {
                                next.add(ns);
                            }
                        }
                    }
                }
            }
            // make prev = next; and re-init next (we reuse existing prev set once we clear it)
            prev.clear();
            tmp = prev;
            prev = next;
            next = tmp;
        }
        // Extract up to numSequences best candidates in heap order.
        int numSeq = Math.min(numSequences, prev.size());
        Sequence[] topSequences = new Sequence[numSeq];
        int seqIndex = 0;
        for (; seqIndex < numSeq; seqIndex++) {
            topSequences[seqIndex] = (Sequence) prev.extract();
        }
        return topSequences;
    }
}
public class WCOutputStream { /** * @ see javax . servlet . ServletOutputStream # println ( float ) */ public void println ( float f ) throws IOException { } }
String value = Float . toString ( f ) ; this . output . write ( value . getBytes ( ) , 0 , value . length ( ) ) ; this . output . write ( CRLF , 0 , 2 ) ;
public class HttpMethodBase { /** * Returns the URI of the HTTP method * @ return The URI * @ throws URIException If the URI cannot be created . * @ see org . apache . commons . httpclient . HttpMethod # getURI ( ) */ @ Override public URI getURI ( ) throws URIException { } }
StringBuffer buffer = new StringBuffer ( ) ; if ( this . httphost != null ) { buffer . append ( this . httphost . getProtocol ( ) . getScheme ( ) ) ; buffer . append ( "://" ) ; buffer . append ( this . httphost . getHostName ( ) ) ; int port = this . httphost . getPort ( ) ; if ( port != - 1 && port != this . httphost . getProtocol ( ) . getDefaultPort ( ) ) { buffer . append ( ":" ) ; buffer . append ( port ) ; } } buffer . append ( this . path ) ; if ( this . queryString != null ) { buffer . append ( '?' ) ; buffer . append ( this . queryString ) ; } String charset = getParams ( ) . getUriCharset ( ) ; return new URI ( buffer . toString ( ) , true , charset ) ;
public class ClipboardUtils { /** * Get the current text from the clipboard . * @ return Clipboard text or { @ code null } if clipboard is empty or unavailable . */ public static String getClipboardText ( final Context context ) { } }
final ClipboardManager clipboard = ( ClipboardManager ) context . getSystemService ( Context . CLIPBOARD_SERVICE ) ; final ClipData clipData = clipboard . getPrimaryClip ( ) ; if ( clipData != null && clipData . getItemCount ( ) > 0 ) { final CharSequence clipboardText = clipData . getItemAt ( 0 ) . getText ( ) ; if ( clipboardText != null ) { return clipboardText . toString ( ) ; } } return null ;
public class SubFileFilter {

    /**
     * Initialize this filter with the main record and up to three key fields,
     * then attach a listener so the filter is torn down when the field/record
     * closes.
     *
     * @param record                   my owner (usually passed as null, and set on addListener in setOwner())
     * @param recordMain               the main record to create a sub-query for
     * @param keyName                  name of the key on the main record
     * @param fldMainFile              first field in the key fields
     * @param fldMainFileName          name of the first filtered field
     * @param fldMainFile2             second field in the key fields ({@code null} for none)
     * @param fldMainFileName2         name of the second filtered field
     * @param fldMainFile3             third field in the key fields ({@code null} for none)
     * @param fldMainFileName3         name of the third filtered field
     * @param bSetFilterIfNull         if true, this will filter if the target field(s) are null (usually an empty query set)
     * @param bRefreshLastIfNotCurrent if true, refresh the last record if the record is not current
     *                                 (typically used for remote sessions where the remote method does an add before the detail can add)
     * @param bAddNewHeaderOnAdd       if true, add a new header record on add
     */
    public void init(Record record, Record recordMain, String keyName, BaseField fldMainFile, String fldMainFileName, BaseField fldMainFile2, String fldMainFileName2, BaseField fldMainFile3, String fldMainFileName3, boolean bSetFilterIfNull, boolean bRefreshLastIfNotCurrent, boolean bAddNewHeaderOnAdd) {
        // For this to work right, the booking number field needs a listener to re-select this file whenever it changes
        super.init(record, fldMainFileName, null, fldMainFileName2, null, fldMainFileName3, null);
        m_recordMain = recordMain;
        m_strKeyName = keyName;
        m_fldMainFile = fldMainFile;
        m_fldMainFile2 = fldMainFile2;
        m_fldMainFile3 = fldMainFile3;
        m_bSetFilterIfNull = bSetFilterIfNull;
        m_bRefreshLastIfNotCurrent = bRefreshLastIfNotCurrent;
        m_bAddNewHeaderOnAdd = bAddNewHeaderOnAdd;
        if (fldMainFile != null)
            fldMainFile.addListener(new FieldRemoveBOnCloseHandler(this)); // Remove this if you close the file first
        else if (recordMain != null)
            recordMain.addListener(new FileRemoveBOnCloseHandler(this));
    }
}
public class PlannerReader { /** * This method extracts resource data from a Planner file . * @ param plannerProject Root node of the Planner file */ private void readResources ( Project plannerProject ) throws MPXJException { } }
Resources resources = plannerProject . getResources ( ) ; if ( resources != null ) { for ( net . sf . mpxj . planner . schema . Resource res : resources . getResource ( ) ) { readResource ( res ) ; } }
public class Html5DatatypeLibraryFactory { /** * Returns a < code > Html5DatatypeLibrary < / code > on the library namespace and < code > null < / code > * otherwise . * @ param namespaceURI a namespace URI * @ return a < code > DatatypeLibrary < / code > or < code > null < / code > * @ see org . relaxng . datatype . DatatypeLibraryFactory # createDatatypeLibrary ( java . lang . String ) */ @ Override public DatatypeLibrary createDatatypeLibrary ( String namespaceURI ) { } }
if ( NAMESPACE . equals ( namespaceURI ) ) { return new Html5DatatypeLibrary ( ) ; } return null ;
public class FieldProducerFactory {

    /**
     * Producers returned from this method are not validated. Internal use only.
     *
     * @param declaringBean  the bean declaring the producer field
     * @param bean           the producer bean itself
     * @param disposalMethod the matching disposal method, if any
     * @return a field producer bound to the given beans
     */
    @Override
    public <T> Producer<T> createProducer(final Bean<X> declaringBean, final Bean<T> bean, DisposalMethod<X, T> disposalMethod) {
        // Resolve the enhanced (metadata-rich) view of the field for this manager.
        EnhancedAnnotatedField<T, X> enhancedField = getManager().getServices().get(MemberTransformer.class).loadEnhancedMember(field, getManager().getId());
        // Anonymous subclass wires the producer back to the enclosing factory's
        // field, manager, and the beans captured from the parameters.
        return new ProducerFieldProducer<X, T>(enhancedField, disposalMethod) {
            @Override
            public AnnotatedField<X> getAnnotated() {
                return field;
            }

            @Override
            public BeanManagerImpl getBeanManager() {
                return getManager();
            }

            @Override
            public Bean<X> getDeclaringBean() {
                return declaringBean;
            }

            @Override
            public Bean<T> getBean() {
                return bean;
            }
        };
    }
}
public class SftpFileAttributes { /** * Determine whether these attributes refer to a symbolic link . * @ return boolean */ public boolean isLink ( ) { } }
if ( sftp . getVersion ( ) > 3 ) { return type == SSH_FILEXFER_TYPE_SYMLINK ; } else if ( permissions != null && ( permissions . longValue ( ) & SftpFileAttributes . S_IFLNK ) == SftpFileAttributes . S_IFLNK ) { return true ; } else { return false ; }
public class DSXMarketDataServiceRawCore { /** * Get recent trades from exchange * @ param pairs String of currency pairs to retrieve ( e . g . " btcusd - btceur " ) * @ param size Integer value from 1 - > get corresponding number of items * @ return DSXTradesWrapper * @ throws IOException */ public DSXTradesWrapper getDSXTrades ( String pairs , int size , String type ) throws IOException { } }
if ( size < 1 ) { size = 1 ; } if ( size > FULL_SIZE ) { size = FULL_SIZE ; } return dsx . getTrades ( pairs . toLowerCase ( ) , size , 1 , type ) ;
public class NameNode { /** * { @ inheritDoc } * implement old API for backwards compatibility */ @ Override @ Deprecated public FileStatus [ ] getCorruptFiles ( ) throws IOException { } }
CorruptFileBlocks corruptFileBlocks = listCorruptFileBlocks ( "/" , null ) ; Set < String > filePaths = new HashSet < String > ( ) ; for ( String file : corruptFileBlocks . getFiles ( ) ) { filePaths . add ( file ) ; } List < FileStatus > fileStatuses = new ArrayList < FileStatus > ( filePaths . size ( ) ) ; for ( String f : filePaths ) { FileStatus fs = getFileInfo ( f ) ; if ( fs != null ) LOG . info ( "found fs for " + f ) ; else LOG . info ( "found no fs for " + f ) ; fileStatuses . add ( fs ) ; } return fileStatuses . toArray ( new FileStatus [ fileStatuses . size ( ) ] ) ;
public class FunctionCall { /** * Sets function argument list * @ param arguments function argument list . Can be { @ code null } , * in which case any existing args are removed . */ public void setArguments ( List < AstNode > arguments ) { } }
if ( arguments == null ) { this . arguments = null ; } else { if ( this . arguments != null ) this . arguments . clear ( ) ; for ( AstNode arg : arguments ) { addArgument ( arg ) ; } }
public class JavaUtils { /** * Load a Java type from a given class loader . * @ param typeName maybe the source notation of a primitve , class name , array of both */ public static Class < ? > loadJavaType ( String typeName , ClassLoader classLoader ) throws ClassNotFoundException { } }
if ( classLoader == null ) classLoader = getContextClassLoader ( ) ; Class < ? > javaType = primitiveNames . get ( typeName ) ; if ( javaType == null ) javaType = getArray ( typeName , classLoader ) ; if ( javaType == null ) javaType = classLoader . loadClass ( typeName ) ; return javaType ;
public class Billing {

    /**
     * <pre>
     * Billing configurations for sending metrics to the consumer project.
     * There can be multiple consumer destinations per service, each one must have
     * a different monitored resource type. A metric can be used in at most
     * one consumer destination.
     * </pre>
     *
     * <code>repeated .google.api.Billing.BillingDestination consumer_destinations = 8;</code>
     *
     * @param index position in the repeated field
     * @return the destination at {@code index}
     */
    public com.google.api.Billing.BillingDestination getConsumerDestinations(int index) {
        return consumerDestinations_.get(index);
    }
}
public class MeasureFactory {

    /**
     * Create {@link PullMeasure}s based on the getters available from an
     * instance, whatever it is. The {@link Class} of the instance is analyzed
     * to retrieve its public methods and a {@link PullMeasure} is built for
     * each method which uses a getter-like signature (no parameters, non-void
     * return, name matching "getXxx", excluding getClass). The map key is the
     * method name without the "get" prefix.
     *
     * @param object the {@link Object} to cover
     * @return the {@link Map} which contains the names of the getter methods
     *         and the corresponding {@link PullMeasure} built from them
     */
    @SuppressWarnings("serial")
    public Map<String, PullMeasure<?>> createPullsFromGetters(final Object object) {
        Map<String, PullMeasure<?>> measures = new HashMap<String, PullMeasure<?>>();
        Class<? extends Object> clazz = object.getClass();
        for (final Method method : clazz.getMethods()) {
            // Getter detection: zero args, non-void, named get<UpperCase...>,
            // and not Object.getClass().
            if (method.getParameterTypes().length == 0
                    && !method.getReturnType().equals(Void.TYPE)
                    && !method.getName().equals("getClass")
                    && method.getName().matches("get[^a-z].*")) {
                String key = method.getName().substring(3);
                // TODO exploit return type to restrict the generics
                measures.put(key, new SimplePullMeasure<Object>(key) {
                    @Override
                    public Object get() {
                        try {
                            // Re-invoke the getter on each pull; reflection failures
                            // are surfaced as unchecked exceptions.
                            return method.invoke(object);
                        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
                            throw new RuntimeException(e);
                        }
                    }
                });
            } else {
                // not a getter, ignore it
            }
        }
        return measures;
    }
}
public class ArrayHelper { /** * Get a 1:1 copy of the passed array using the passed number of array * elements . Nested elements are not deep - copied - the references are re - used ! * @ param aArray * The array to be copied . * @ param nLength * The number of elements to be copied into the new array . May not be * & lt ; 0 . If the passed number of elements exceeds the number of * elements in the array , only the available number of elements in the * source array are copied . * @ return < code > null < / code > if the passed array is < code > null < / code > - a non - * < code > null < / code > copy otherwise . */ @ Nullable @ ReturnsMutableCopy public static float [ ] getCopy ( @ Nullable final float [ ] aArray , @ Nonnegative final int nLength ) { } }
return aArray == null ? null : getCopy ( aArray , 0 , Math . min ( aArray . length , nLength ) ) ;
public class UriEscaper { /** * Escapes a string as a URI query * @ param query the path to escape * @ param strict whether or not to do strict escaping * @ return the escaped string */ public static String escapeQuery ( final String query , final boolean strict ) { } }
return ( strict ? STRICT_ESCAPER : ESCAPER ) . escapeQuery ( query ) ;
public class MarkerUtil {

    /**
     * Builds marker parameters for every reportable bug in the collection,
     * skipping bugs whose detector plugin is globally disabled. As a
     * side-effect this method updates missing line information for some bugs
     * stored in the given bug collection.
     *
     * @param project       the Java project the bugs belong to; a null project is logged and yields an empty list
     * @param theCollection the bug collection to iterate
     * @param monitor       progress monitor; iteration stops early when cancelled
     * @return never null
     */
    public static List<MarkerParameter> createBugParameters(IJavaProject project, BugCollection theCollection, IProgressMonitor monitor) {
        List<MarkerParameter> bugParameters = new ArrayList<>();
        if (project == null) {
            // Defensive: log the programming error but return an empty result
            // rather than propagating an NPE.
            FindbugsPlugin.getDefault().logException(new NullPointerException("project is null"), "project is null");
            return bugParameters;
        }
        Iterator<BugInstance> iterator = theCollection.iterator();
        while (iterator.hasNext() && !monitor.isCanceled()) {
            BugInstance bug = iterator.next();
            DetectorFactory detectorFactory = bug.getDetectorFactory();
            // Skip bugs from globally disabled detector plugins.
            if (detectorFactory != null && !detectorFactory.getPlugin().isGloballyEnabled()) {
                continue;
            }
            MarkerParameter mp = createMarkerParameter(project, bug);
            if (mp != null) {
                bugParameters.add(mp);
            }
        }
        return bugParameters;
    }
}
public class IssueTrackingDelegator { /** * Special case where we want to do the issue tracking with the merge branch , and copy matched issue to the current branch . */ private boolean isFirstAnalysisSecondaryLongLivingBranch ( ) { } }
if ( analysisMetadataHolder . isFirstAnalysis ( ) ) { Branch branch = analysisMetadataHolder . getBranch ( ) ; return ! branch . isMain ( ) && branch . getType ( ) == BranchType . LONG ; } return false ;
public class Ifc4PackageImpl {

    /**
     * Returns the {@code EClass} for IfcStructuralSurfaceReaction, lazily
     * resolved from the registered Ifc4 package (classifier index 664) on
     * first access and cached thereafter.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public EClass getIfcStructuralSurfaceReaction() {
        if (ifcStructuralSurfaceReactionEClass == null) {
            ifcStructuralSurfaceReactionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(664);
        }
        return ifcStructuralSurfaceReactionEClass;
    }
}
public class CombinedLayertree {

    /**
     * Initializes the layer tree legend: configures the icon size from the
     * widget configuration (falling back to the default) and registers
     * label/visibility/style/filter change handlers on every map layer so the
     * tree nodes refresh their icons when the layers change.
     */
    @Override
    protected void initialize() {
        super.initialize();
        ClientLayerTreeInfo ltwli = (ClientLayerTreeInfo) mapWidget.getMapModel().getMapInfo().getWidgetInfo(ClientLayerTreeInfo.IDENTIFIER);
        // Use the configured icon size when widget info is present, else the default.
        setIconSize(ltwli == null ? GltLayout.layerTreeIconSize : ltwli.getIconSize());
        for (Layer<?> layer : mapModel.getLayers()) {
            // Handler registrations are collected so they can be removed later.
            registrations.add(layer.addLayerChangedHandler(new LayerChangedHandler() {
                public void onLabelChange(LayerLabeledEvent event) {
                    GWT.log("Legend: onLabelChange() - " + event.getLayer().getLabel());
                    // find the node & update the icon (nodes are matched by label)
                    for (TreeNode node : tree.getAllNodes()) {
                        if (node.getName().equals(event.getLayer().getLabel()) && node instanceof LayerTreeTreeNode) {
                            ((LayerTreeTreeNode) node).updateIcon();
                        }
                    }
                }

                public void onVisibleChange(LayerShownEvent event) {
                    GWT.log("Legend: onVisibleChange() - " + event.getLayer().getLabel());
                    // find the node & update the icon
                    for (TreeNode node : tree.getAllNodes()) {
                        if (node.getName().equals(event.getLayer().getLabel()) && node instanceof LayerTreeTreeNode) {
                            ((LayerTreeTreeNode) node).updateIcon();
                        }
                    }
                }
            }));
            registrations.add(layer.addLayerStyleChangedHandler(new LayerStyleChangedHandler() {
                public void onLayerStyleChange(LayerStyleChangeEvent event) {
                    GWT.log("Legend: onLayerStyleChange()");
                    Layer<?> layer = event.getLayer();
                    // Style changes only affect vector-layer legend items.
                    if (layer instanceof VectorLayer) {
                        for (LayerTreeLegendItemNode node : legendIcons.get(layer)) {
                            node.updateStyle((VectorLayer) layer);
                        }
                    }
                }
            }));
            if (layer instanceof VectorLayer) {
                VectorLayer vl = (VectorLayer) layer;
                // Vector layers additionally react to filter changes.
                registrations.add(vl.addLayerFilteredHandler(new LayerFilteredHandler() {
                    public void onFilterChange(LayerFilteredEvent event) {
                        GWT.log("Legend: onLayerFilterChange() - " + event.getLayer().getLabel());
                        // find the node & update the icon
                        for (TreeNode node : tree.getAllNodes()) {
                            if (node.getName().equals(event.getLayer().getLabel()) && node instanceof LayerTreeTreeNode) {
                                ((LayerTreeTreeNode) node).updateIcon();
                            }
                        }
                    }
                }));
            }
        }
    }
}
public class FIPXMLParser {

    /**
     * Parse a product descriptor from a file containing a swap trade.
     *
     * @param file file containing a swap trade
     * @return product descriptor extracted from the file
     * @throws SAXException thrown by the xml parser
     * @throws IOException thrown if the file is not found or another IO error occurred
     * @throws ParserConfigurationException thrown by the xml parser
     * @throws IllegalArgumentException if the document is not FIPXML, not an
     *         interest rate swap, or does not have exactly two legs
     */
    public InterestRateSwapProductDescriptor getSwapProductDescriptor(File file) throws SAXException, IOException, ParserConfigurationException {
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(file);
        doc.getDocumentElement().normalize();
        // Check compatibility
        if (!doc.getDocumentElement().getNodeName().equalsIgnoreCase("FIPXML")) {
            throw new IllegalArgumentException("This parser is meant for XML of type FIPXML, but file was " + doc.getDocumentElement().getNodeName() + ".");
        }
        if (doc.getElementsByTagName("instrumentName").item(0).getTextContent().equalsIgnoreCase("Interest Rate Swap")) {
            if (doc.getElementsByTagName("legAgreement").getLength() != 2) {
                throw new IllegalArgumentException("Unknown swap configuration. Number of swap legs was " + doc.getElementsByTagName("legAgreement").getLength());
            }
        } else {
            throw new IllegalArgumentException("This xml parser is not set up to process trade of type " + doc.getElementsByTagName("instrumentName").item(0).getTextContent());
        }
        DayCountConvention daycountConvention = DayCountConventionFactory.getDayCountConvention(doc.getElementsByTagName("dayCountFraction").item(0).getTextContent());
        // TODO try to get curves from file. Problems if there are two float/fixed legs
        // forward curve
        String forwardCurveName = null;
        NodeList temp = doc.getElementsByTagName("instrumentId");
        for (int index = 0; index < temp.getLength(); index++) {
            Node id = temp.item(index);
            if (id.getAttributes().getNamedItem("instrumentIdScheme").getTextContent().equalsIgnoreCase("INTERESTRATE")) {
                forwardCurveName = id.getTextContent();
                break;
            }
        }
        // Discount curve
        // NOTE(review): forwardCurveName stays null when no INTERESTRATE
        // instrumentId exists, making the split() below throw an NPE — confirm
        // whether such documents are possible and should be rejected explicitly.
        String[] split = forwardCurveName.split("_");
        String discountCurveName = (this.discountCurveName == null || this.discountCurveName.length() == 0) ? split[0] + "_" + split[1] : this.discountCurveName;
        InterestRateSwapLegProductDescriptor legReceiver = null;
        InterestRateSwapLegProductDescriptor legPayer = null;
        // Get descriptors for both legs
        NodeList legs = doc.getElementsByTagName("legAgreement");
        for (int legIndex = 0; legIndex < legs.getLength(); legIndex++) {
            Element leg = (Element) legs.item(legIndex);
            // Pay/receive is relative to whether this agent is the buyer.
            boolean isPayer = (leg.getElementsByTagName("payDirection").item(0).getTextContent().equalsIgnoreCase("SELLER_TO_BUYER") && !agentIsBuyer) || (leg.getElementsByTagName("payDirection").item(0).getTextContent().equalsIgnoreCase("BUYER_TO_SELLER") && agentIsBuyer);
            boolean isFixed = leg.getElementsByTagName("interestType").item(0).getTextContent().equals("FIX");
            // Fixed legs get no forward curve; floating legs do.
            if (isPayer) {
                legPayer = getSwapLegProductDescriptor(leg, isFixed ? null : forwardCurveName, discountCurveName, daycountConvention);
            } else {
                legReceiver = getSwapLegProductDescriptor(leg, isFixed ? null : forwardCurveName, discountCurveName, daycountConvention);
            }
        }
        return new InterestRateSwapProductDescriptor(legReceiver, legPayer);
    }
}
public class PAreaSizeBDGenerator { /** * @ param < S > A phantom type parameter indicating the coordinate space of the * area * @ return A generator initialized with useful defaults */ public static < S > PAreaSizeBDGenerator < S > create ( ) { } }
final LongGenerator gen = new LongGenerator ( 0L , Long . MAX_VALUE ) ; return new PAreaSizeBDGenerator < > ( ( ) -> new BigDecimal ( gen . next ( ) . toString ( ) ) ) ;
public class ImageEncodingImpl {

    /**
     * Reflective feature accessor: maps an EMF feature ID to the corresponding
     * getter, delegating unknown IDs to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.IMAGE_ENCODING__COMPRID:
                return getCOMPRID();
            case AfplibPackage.IMAGE_ENCODING__RECID:
                return getRECID();
            case AfplibPackage.IMAGE_ENCODING__BITORDR:
                return getBITORDR();
        }
        return super.eGet(featureID, resolve, coreType);
    }
}
public class AWSDirectoryServiceClient { /** * Creates a snapshot of a Simple AD or Microsoft AD directory in the AWS cloud . * < note > * You cannot take snapshots of AD Connector directories . * < / note > * @ param createSnapshotRequest * Contains the inputs for the < a > CreateSnapshot < / a > operation . * @ return Result of the CreateSnapshot operation returned by the service . * @ throws EntityDoesNotExistException * The specified entity could not be found . * @ throws InvalidParameterException * One or more parameters are not valid . * @ throws SnapshotLimitExceededException * The maximum number of manual snapshots for the directory has been reached . You can use the * < a > GetSnapshotLimits < / a > operation to determine the snapshot limits for a directory . * @ throws ClientException * A client exception has occurred . * @ throws ServiceException * An exception has occurred in AWS Directory Service . * @ sample AWSDirectoryService . CreateSnapshot * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ds - 2015-04-16 / CreateSnapshot " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateSnapshotResult createSnapshot ( CreateSnapshotRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateSnapshot ( request ) ;
public class MiniTemplator { /** * Called recursively . */ private void writeBlockInstances ( StringBuilder out , int blockNo , int parentInstLevel ) { } }
BlockDynTabRec bdtr = blockDynTab [ blockNo ] ; while ( true ) { int blockInstNo = bdtr . currBlockInstNo ; if ( blockInstNo == - 1 ) { break ; } BlockInstTabRec bitr = blockInstTab [ blockInstNo ] ; if ( bitr . parentInstLevel < parentInstLevel ) { throw new AssertionError ( ) ; } if ( bitr . parentInstLevel > parentInstLevel ) { break ; } writeBlockInstance ( out , blockInstNo ) ; bdtr . currBlockInstNo = bitr . nextBlockInstNo ; }
public class OfflinePickerSample { /** * step two , configure our offline tile provider * @ param files */ private void setProviderConfig ( String [ ] files ) { } }
// Splits the given archive paths into geopackages (.gpkg), Mapsforge maps (.map) and generic
// osmdroid archives, builds one tile module provider per category, then installs a combined
// MapTileProviderArray on the map view and prompts the user to choose a tile source.
// NOTE(review): `geopackage` appears to be added to `providers` twice — once right after
// creation in the geopackage section and again in the `if (geopackage != null)` block below;
// confirm whether the duplicate add is intentional.
if ( files == null || files . length == 0 ) return ; SimpleRegisterReceiver simpleRegisterReceiver = new SimpleRegisterReceiver ( getContext ( ) ) ; if ( tileWriter != null ) tileWriter . onDetach ( ) ; tileWriter = new SqlTileWriter ( ) ; tileSources . clear ( ) ; List < MapTileModuleProviderBase > providers = new ArrayList < > ( ) ; providers . add ( new MapTileAssetsProvider ( simpleRegisterReceiver , getContext ( ) . getAssets ( ) ) ) ; List < File > geopackages = new ArrayList < > ( ) ; List < File > forgeMaps = new ArrayList < > ( ) ; List < IArchiveFile > archives = new ArrayList < > ( ) ; // this part seperates the geopackage and maps forge stuff since they are handled differently for ( int i = 0 ; i < files . length ; i ++ ) { File archive = new File ( files [ i ] ) ; if ( archive . getName ( ) . endsWith ( "gpkg" ) ) { geopackages . add ( archive ) ; } else if ( archive . getName ( ) . endsWith ( "map" ) ) { forgeMaps . add ( archive ) ; } else { IArchiveFile temp = ArchiveFileFactory . getArchiveFile ( archive ) ; if ( temp != null ) { Set < String > tileSources = temp . getTileSources ( ) ; Iterator < String > iterator = tileSources . iterator ( ) ; while ( iterator . hasNext ( ) ) { this . tileSources . add ( FileBasedTileSource . getSource ( iterator . next ( ) ) ) ; archives . add ( temp ) ; } } } } // setup the standard osmdroid - android library supported offline tile providers IArchiveFile [ ] archArray = new IArchiveFile [ archives . size ( ) ] ; archArray = archives . toArray ( archArray ) ; final MapTileFileArchiveProvider mapTileFileArchiveProvider = new MapTileFileArchiveProvider ( simpleRegisterReceiver , TileSourceFactory . DEFAULT_TILE_SOURCE , archArray ) ; GeoPackageMapTileModuleProvider geopackage = null ; GeoPackageProvider provider = null ; // geopackages if ( ! geopackages . isEmpty ( ) ) { File [ ] maps = new File [ geopackages . size ( ) ] ; maps = geopackages . toArray ( maps ) ; GeoPackageManager manager = GeoPackageFactory . 
getManager ( getContext ( ) ) ; // Import database for ( File f : maps ) { try { boolean imported = manager . importGeoPackage ( f ) ; } catch ( Exception ex ) { ex . printStackTrace ( ) ; } } provider = new GeoPackageProvider ( maps , getContext ( ) ) ; geopackage = provider . geoPackageMapTileModuleProvider ( ) ; providers . add ( geopackage ) ; List < GeopackageRasterTileSource > geotileSources = new ArrayList < > ( ) ; geotileSources . addAll ( geopackage . getTileSources ( ) ) ; tileSources . addAll ( geotileSources ) ; // TODO add feature tiles here too } MapsForgeTileModuleProvider moduleProvider = null ; if ( ! forgeMaps . isEmpty ( ) ) { // fire up the forge maps . . . XmlRenderTheme theme = null ; try { theme = new AssetsRenderTheme ( getContext ( ) . getApplicationContext ( ) , "renderthemes/" , "rendertheme-v4.xml" ) ; } catch ( Exception ex ) { ex . printStackTrace ( ) ; } File [ ] forge = new File [ forgeMaps . size ( ) ] ; forge = forgeMaps . toArray ( forge ) ; MapsForgeTileSource fromFiles = MapsForgeTileSource . createFromFiles ( forge , theme , "rendertheme-v4" ) ; tileSources . add ( fromFiles ) ; // Create the module provider ; this class provides a TileLoader that // actually loads the tile from the map file . moduleProvider = new MapsForgeTileModuleProvider ( simpleRegisterReceiver , fromFiles , tileWriter ) ; } final MapTileApproximater approximationProvider = new MapTileApproximater ( ) ; approximationProvider . addProvider ( mapTileFileArchiveProvider ) ; if ( geopackage != null ) { providers . add ( geopackage ) ; approximationProvider . addProvider ( geopackage ) ; } if ( moduleProvider != null ) { providers . add ( moduleProvider ) ; approximationProvider . addProvider ( moduleProvider ) ; } providers . add ( mapTileFileArchiveProvider ) ; providers . add ( approximationProvider ) ; MapTileModuleProviderBase [ ] providerArray = new MapTileModuleProviderBase [ providers . size ( ) ] ; for ( int i = 0 ; i < providers . 
size ( ) ; i ++ ) { providerArray [ i ] = providers . get ( i ) ; } MapTileProviderArray obj = new MapTileProviderArray ( TileSourceFactory . DEFAULT_TILE_SOURCE , simpleRegisterReceiver , providerArray ) ; mMapView . setTileProvider ( obj ) ; // ok everything is setup , we now have 0 or many tile sources available , ask the user promptForTileSource ( ) ;
public class Util { /** * Convert this value to a XML string . * @ param This value ' s tag . * @ param objValue The raw data value . * @ return The XML string for this value . */ @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) public static StringBuffer getXML ( StringBuffer sbXML , String strParam , Object objValue ) { Util . addStartTag ( sbXML , strParam ) ; if ( objValue instanceof Map ) { Util . addXMLMap ( sbXML , ( Map ) objValue ) ; } else { String strValue = Constant . BLANK ; if ( objValue != null ) strValue = objValue . toString ( ) ; if ( Util . isCData ( strValue ) ) strValue = CDATA_START + strValue + CDATA_END ; sbXML . append ( strValue ) ; } return Util . addEndTag ( sbXML , strParam ) . append ( Constant . RETURN ) ;
public class AbstractMongoDAO { /** * finds all elements matching the given query and sorts them accordingly * @ param query the query to search for * @ param sort the sort query to apply * @ param params the parameters to replace # symbols * @ return the list of elements found */ protected final List < T > findSortedByQuery ( String query , String sort , Object ... params ) { } }
return this . dataAccess . findSortedByQuery ( query , sort , params ) ;
public class WaitService { public void wait ( ImageConfiguration imageConfig , Properties projectProperties , String containerId ) throws IOException { } }
List < WaitChecker > checkers = prepareWaitCheckers ( imageConfig , projectProperties , containerId ) ; int timeout = getTimeOut ( imageConfig ) ; if ( checkers . isEmpty ( ) ) { if ( timeout > 0 ) { log . info ( "%s: Pausing for %d ms" , imageConfig . getDescription ( ) , timeout ) ; WaitUtil . sleep ( timeout ) ; } return ; } String logLine = extractCheckerLog ( checkers ) ; ContainerRunningPrecondition precondition = new ContainerRunningPrecondition ( dockerAccess , containerId ) ; try { long waited = WaitUtil . wait ( precondition , timeout , checkers ) ; log . info ( "%s: Waited %s %d ms" , imageConfig . getDescription ( ) , logLine , waited ) ; } catch ( WaitTimeoutException exp ) { String desc = String . format ( "%s: Timeout after %d ms while waiting %s" , imageConfig . getDescription ( ) , exp . getWaited ( ) , logLine ) ; log . error ( desc ) ; throw new IOException ( desc ) ; } catch ( PreconditionFailedException exp ) { String desc = String . format ( "%s: Container stopped with exit code %d unexpectedly after %d ms while waiting %s" , imageConfig . getDescription ( ) , precondition . getExitCode ( ) , exp . getWaited ( ) , logLine ) ; log . error ( desc ) ; throw new IOException ( desc ) ; }
public class PatreonAPI { /** * Retrieve pledges for the specified campaign * @ param campaignId id for campaign to retrieve * @ param pageSize how many pledges to return * @ param pageCursor A cursor retreived from a previous API call , or null for the initial page . * See { @ link # getNextCursorFromDocument ( JSONAPIDocument ) } * @ return the page of pledges * @ throws IOException Thrown when the GET request failed */ public JSONAPIDocument < List < Pledge > > fetchPageOfPledges ( String campaignId , int pageSize , String pageCursor ) throws IOException { } }
return fetchPageOfPledges ( campaignId , pageSize , pageCursor , null ) ;
public class JsHdrsImpl {
    /**
     * Returns the message handle which uniquely identifies this message, building
     * and caching it lazily from the header fields on first access.
     *
     * @return an SIMessageHandle which identifies this message
     */
    public SIMessageHandle getMessageHandle() {
        if (cachedMessageHandle == null) {
            // Read source UUID (may be absent) and system message value from the header.
            byte[] uuidBytes = (byte[]) jmo.getField(JsHdrAccess.SYSTEMMESSAGESOURCEUUID);
            Long value = (Long) jmo.getField(JsHdrAccess.SYSTEMMESSAGEVALUE);
            SIBUuid8 uuid = (uuidBytes == null) ? null : new SIBUuid8(uuidBytes);
            cachedMessageHandle = new JsMessageHandleImpl(uuid, value);
        }
        return cachedMessageHandle;
    }
}
public class CmsFavoriteDialog { /** * Gets the favorite entry for a given row . * @ param row the widget used to display the favorite * @ return the favorite entry for the widget */ private CmsFavoriteEntry getEntry ( Component row ) { } }
if ( row instanceof CmsFavInfo ) { return ( ( CmsFavInfo ) row ) . getEntry ( ) ; } return null ;
public class SectionLoader { /** * Loads the section that belongs to the header . * This does not instantiate special sections . Use methods like * { @ link # loadImportSection ( ) } or { @ link # loadResourceSection ( ) } instead . * @ param header * the section ' s header * @ return { @ link PESection } that belongs to the header */ public PESection loadSectionFrom ( SectionHeader header ) { } }
long size = getReadSize ( header ) ; long offset = header . getAlignedPointerToRaw ( ) ; return new PESection ( size , offset , header , file ) ;
public class OptionalParamMap { /** * validation : * enforces key length less than 256 UTF8 bytes * enforces valid value type ( cannot be null ) * enforces valid String value length less than 32767 UTF8 bytes */ private void validateEntry ( String key , Object value ) { } }
if ( isBlank ( key ) ) { throw new IllegalArgumentException ( "key cannot be null/blank" ) ; } if ( ModifiedUTF8Charset . calculateByteLength ( key ) > 255 ) { throw new IllegalArgumentException ( "key length > 255 bytes" ) ; } else if ( value == null ) { throw new IllegalArgumentException ( "value cannot be null" ) ; } else if ( value instanceof String ) { if ( ModifiedUTF8Charset . calculateByteLength ( ( String ) value ) > 32767 ) { throw new IllegalArgumentException ( "string value length > 34767 bytes" ) ; } } else if ( value instanceof Integer || value instanceof Long || value instanceof Double ) { // valid } else { throw new IllegalArgumentException ( "Illegal value type: " + value . getClass ( ) . toString ( ) ) ; }
public class NetworkUtils { /** * Returns whether or not the specified address represents an address on * the public Internet . * @ param ia The address to check . * @ return < code > true < / code > if the address is an address on the public * Internet , otherwise < code > false < / code > . */ public static boolean isPublicAddress ( final InetAddress ia ) { } }
// We define public addresses by what they ' re not . A public address // cannot be any one of the following : return ! ia . isSiteLocalAddress ( ) && ! ia . isLinkLocalAddress ( ) && ! ia . isAnyLocalAddress ( ) && ! ia . isLoopbackAddress ( ) && ! ia . isMulticastAddress ( ) ;
public class ReceiveMessageActionParser {
    /**
     * Parses "validate" child elements of the message element and adds their XPath
     * expressions to the message validation context. For now only XPath validation
     * information is handled (script and namespace validation are handled elsewhere).
     *
     * @param messageElement the message DOM element
     * @param context        the message validation context receiving the expressions
     */
    private void parseXPathValidationElements(Element messageElement, XpathMessageValidationContext context) {
        Map<String, Object> expressions = new HashMap<>();
        List<?> validateElements = DomUtils.getChildElementsByTagName(messageElement, "validate");
        // Only touch the context when at least one validate element is present.
        if (!validateElements.isEmpty()) {
            for (Object element : validateElements) {
                extractXPathValidateExpressions((Element) element, expressions);
            }
            context.setXpathExpressions(expressions);
        }
    }
}
public class MtasSolrComponentKwic { /** * ( non - Javadoc ) * @ see * mtas . solr . handler . component . util . MtasSolrComponent # create ( mtas . codec . util . * CodecComponent . BasicComponent , java . lang . Boolean ) */ public SimpleOrderedMap < Object > create ( ComponentKwic kwic , Boolean encode ) { } }
// Builds the Solr response structure for a KWIC (keyword-in-context) component. Depending on
// kwic.output, either hit-based data (left context / hit / right context maps keyed by token
// position) or token-based data (full token details per hit) is emitted, one item list per
// matching document together with its key, sub-total and min/max positions.
// NOTE(review): the `encode` parameter is not referenced anywhere in this body — confirm
// whether encoding was intended to be applied here.
SimpleOrderedMap < Object > mtasKwicResponse = new SimpleOrderedMap < > ( ) ; mtasKwicResponse . add ( "key" , kwic . key ) ; ArrayList < NamedList < Object > > mtasKwicItemResponses = new ArrayList < > ( ) ; if ( kwic . output . equals ( ComponentKwic . KWIC_OUTPUT_HIT ) ) { for ( int docId : kwic . hits . keySet ( ) ) { NamedList < Object > mtasKwicItemResponse = new SimpleOrderedMap < > ( ) ; List < KwicHit > list = kwic . hits . get ( docId ) ; List < NamedList < Object > > mtasKwicItemResponseItems = new ArrayList < > ( ) ; for ( KwicHit h : list ) { NamedList < Object > mtasKwicItemResponseItem = new SimpleOrderedMap < > ( ) ; SortedMap < Integer , List < List < String > > > hitData = new TreeMap < > ( ) ; SortedMap < Integer , List < List < String > > > leftData = null ; SortedMap < Integer , List < List < String > > > rightData = null ; if ( kwic . left > 0 ) { leftData = new TreeMap < > ( ) ; } if ( kwic . right > 0 ) { rightData = new TreeMap < > ( ) ; } for ( int position = Math . max ( 0 , h . startPosition - kwic . left ) ; position <= ( h . endPosition + kwic . right ) ; position ++ ) { if ( h . hits . containsKey ( position ) ) { List < List < String > > hitDataItem = new ArrayList < > ( ) ; for ( String term : h . hits . get ( position ) ) { List < String > hitDataSubItem = new ArrayList < > ( ) ; hitDataSubItem . add ( CodecUtil . termPrefix ( term ) ) ; hitDataSubItem . add ( CodecUtil . termValue ( term ) ) ; hitDataItem . add ( hitDataSubItem ) ; } if ( position < h . startPosition ) { if ( leftData != null ) { leftData . put ( position , hitDataItem ) ; } } else if ( position > h . endPosition ) { if ( rightData != null ) { rightData . put ( position , hitDataItem ) ; } } else { hitData . put ( position , hitDataItem ) ; } } } if ( kwic . left > 0 ) { mtasKwicItemResponseItem . add ( "left" , leftData ) ; } mtasKwicItemResponseItem . add ( "hit" , hitData ) ; if ( kwic . right > 0 ) { mtasKwicItemResponseItem . 
add ( "right" , rightData ) ; } mtasKwicItemResponseItems . add ( mtasKwicItemResponseItem ) ; } mtasKwicItemResponse . add ( "documentKey" , kwic . uniqueKey . get ( docId ) ) ; mtasKwicItemResponse . add ( "documentTotal" , kwic . subTotal . get ( docId ) ) ; mtasKwicItemResponse . add ( "documentMinPosition" , kwic . minPosition . get ( docId ) ) ; mtasKwicItemResponse . add ( "documentMaxPosition" , kwic . maxPosition . get ( docId ) ) ; mtasKwicItemResponse . add ( "list" , mtasKwicItemResponseItems ) ; mtasKwicItemResponses . add ( mtasKwicItemResponse ) ; } } else if ( kwic . output . equals ( ComponentKwic . KWIC_OUTPUT_TOKEN ) ) { for ( int docId : kwic . tokens . keySet ( ) ) { NamedList < Object > mtasKwicItemResponse = new SimpleOrderedMap < > ( ) ; List < KwicToken > list = kwic . tokens . get ( docId ) ; List < NamedList < Object > > mtasKwicItemResponseItems = new ArrayList < > ( ) ; for ( KwicToken k : list ) { NamedList < Object > mtasKwicItemResponseItem = new SimpleOrderedMap < > ( ) ; mtasKwicItemResponseItem . add ( "startPosition" , k . startPosition ) ; mtasKwicItemResponseItem . add ( "endPosition" , k . endPosition ) ; ArrayList < NamedList < Object > > mtasKwicItemResponseItemTokens = new ArrayList < > ( ) ; for ( MtasToken token : k . tokens ) { NamedList < Object > mtasKwicItemResponseItemToken = new SimpleOrderedMap < > ( ) ; if ( token . getId ( ) != null ) { mtasKwicItemResponseItemToken . add ( "mtasId" , token . getId ( ) ) ; } mtasKwicItemResponseItemToken . add ( "prefix" , token . getPrefix ( ) ) ; mtasKwicItemResponseItemToken . add ( "value" , token . getPostfix ( ) ) ; if ( token . getPositionStart ( ) != null ) { mtasKwicItemResponseItemToken . add ( "positionStart" , token . getPositionStart ( ) ) ; mtasKwicItemResponseItemToken . add ( "positionEnd" , token . getPositionEnd ( ) ) ; } if ( token . getPositions ( ) != null ) { mtasKwicItemResponseItemToken . add ( "positions" , Arrays . toString ( token . 
getPositions ( ) ) ) ; } if ( token . getParentId ( ) != null ) { mtasKwicItemResponseItemToken . add ( "parentMtasId" , token . getParentId ( ) ) ; } if ( token . getPayload ( ) != null ) { mtasKwicItemResponseItemToken . add ( "payload" , token . getPayload ( ) ) ; } if ( token . getOffsetStart ( ) != null ) { mtasKwicItemResponseItemToken . add ( "offsetStart" , token . getOffsetStart ( ) ) ; mtasKwicItemResponseItemToken . add ( "offsetEnd" , token . getOffsetEnd ( ) ) ; } if ( token . getRealOffsetStart ( ) != null ) { mtasKwicItemResponseItemToken . add ( "realOffsetStart" , token . getRealOffsetStart ( ) ) ; mtasKwicItemResponseItemToken . add ( "realOffsetEnd" , token . getRealOffsetEnd ( ) ) ; } mtasKwicItemResponseItemTokens . add ( mtasKwicItemResponseItemToken ) ; } mtasKwicItemResponseItem . add ( "tokens" , mtasKwicItemResponseItemTokens ) ; mtasKwicItemResponseItems . add ( mtasKwicItemResponseItem ) ; } mtasKwicItemResponse . add ( "documentKey" , kwic . uniqueKey . get ( docId ) ) ; mtasKwicItemResponse . add ( "documentTotal" , kwic . subTotal . get ( docId ) ) ; mtasKwicItemResponse . add ( "documentMinPosition" , kwic . minPosition . get ( docId ) ) ; mtasKwicItemResponse . add ( "documentMaxPosition" , kwic . maxPosition . get ( docId ) ) ; mtasKwicItemResponse . add ( "list" , mtasKwicItemResponseItems ) ; mtasKwicItemResponses . add ( mtasKwicItemResponse ) ; } } mtasKwicResponse . add ( "list" , mtasKwicItemResponses ) ; return mtasKwicResponse ;
public class ST_ConnectedComponents { /** * Calculate the node and edge connected component tables . * @ param connection Connection * @ param inputTable Edges table produced by ST _ Graph * @ param orientation Orientation string * @ return True if the calculation was successful * @ throws SQLException */ public static boolean getConnectedComponents ( Connection connection , String inputTable , String orientation ) throws SQLException { } }
KeyedGraph graph = prepareGraph ( connection , inputTable , orientation , null , VUCent . class , Edge . class ) ; if ( graph == null ) { return false ; } final List < Set < VUCent > > componentsList = getConnectedComponents ( graph , orientation ) ; final TableLocation tableName = TableUtilities . parseInputTable ( connection , inputTable ) ; final TableLocation nodesName = TableUtilities . suffixTableLocation ( tableName , NODE_COMP_SUFFIX ) ; final TableLocation edgesName = TableUtilities . suffixTableLocation ( tableName , EDGE_COMP_SUFFIX ) ; if ( storeNodeConnectedComponents ( connection , nodesName , edgesName , componentsList ) ) { if ( storeEdgeConnectedComponents ( connection , tableName , nodesName , edgesName ) ) { return true ; } } return false ;
public class AppServicePlansInner { /** * Update a Virtual Network gateway . * Update a Virtual Network gateway . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service plan . * @ param vnetName Name of the Virtual Network . * @ param gatewayName Name of the gateway . Only the ' primary ' gateway is supported . * @ param connectionEnvelope Definition of the gateway . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the VnetGatewayInner object */ public Observable < ServiceResponse < VnetGatewayInner > > updateVnetGatewayWithServiceResponseAsync ( String resourceGroupName , String name , String vnetName , String gatewayName , VnetGatewayInner connectionEnvelope ) { } }
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( name == null ) { throw new IllegalArgumentException ( "Parameter name is required and cannot be null." ) ; } if ( vnetName == null ) { throw new IllegalArgumentException ( "Parameter vnetName is required and cannot be null." ) ; } if ( gatewayName == null ) { throw new IllegalArgumentException ( "Parameter gatewayName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( connectionEnvelope == null ) { throw new IllegalArgumentException ( "Parameter connectionEnvelope is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } Validator . validate ( connectionEnvelope ) ; return service . updateVnetGateway ( resourceGroupName , name , vnetName , gatewayName , this . client . subscriptionId ( ) , connectionEnvelope , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < VnetGatewayInner > > > ( ) { @ Override public Observable < ServiceResponse < VnetGatewayInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < VnetGatewayInner > clientResponse = updateVnetGatewayDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class Triangle3d { /** * { @ inheritDoc } */ @ Override public void setP3 ( Point3D point ) { } }
setP3 ( point . getX ( ) , point . getY ( ) , point . getZ ( ) ) ;
public class CdnConfiguration { /** * Sets the sourceContentConfiguration value for this CdnConfiguration . * @ param sourceContentConfiguration * Parameters about this CDN configuration as a source of content . * This facilitates fetching the * original content for conditioning and delivering the * original content as part of a modified * stream . */ public void setSourceContentConfiguration ( com . google . api . ads . admanager . axis . v201808 . SourceContentConfiguration sourceContentConfiguration ) { } }
this . sourceContentConfiguration = sourceContentConfiguration ;
public class ModularParser { /** * Takes a list of SectionContent and returns a SectionContainer with the * given SectionContent s in the right structure . */ private SectionContainer buildSectionStructure ( List < SectionContent > scl ) { } }
// Rebuilds the section hierarchy: for each content item, walks down (or creates) container
// levels between the root and the item's level, promoting a plain Section into a
// SectionContainer when deeper nesting is required, then attaches the item.
// NOTE(review): in the `else` branch a fresh SectionContainer is assigned to `sContainer`
// without ever being added to its parent container — content placed into it appears to be
// detached from `result`; confirm whether this is intentional.
SectionContainer result = new SectionContainer ( 0 ) ; for ( SectionContent sContent : scl ) { int contentLevel = sContent . getLevel ( ) ; SectionContainer sContainer = result ; // get the right SectionContainer or create it for ( int containerLevel = result . getLevel ( ) + 1 ; containerLevel < contentLevel ; containerLevel ++ ) { int containerSubSections = sContainer . nrOfSubSections ( ) ; if ( containerSubSections != 0 ) { Section temp = sContainer . getSubSection ( containerSubSections - 1 ) ; if ( temp . getClass ( ) == SectionContainer . class ) { sContainer = ( SectionContainer ) temp ; } else { SectionContainer sct = new SectionContainer ( temp . getTitleElement ( ) , containerLevel ) ; sct . addSection ( temp ) ; if ( calculateSrcSpans ) { sct . setSrcSpan ( temp . getSrcSpan ( ) ) ; } temp . setTitleElement ( null ) ; temp . setLevel ( containerLevel + 1 ) ; sContainer . removeSection ( temp ) ; sContainer . addSection ( sct ) ; sContainer = sct ; } } else { sContainer = new SectionContainer ( null , containerLevel ) ; } } sContainer . addSection ( sContent ) ; } if ( calculateSrcSpans ) { result . setSrcSpan ( new SrcSpan ( 0 , - 1 ) ) ; } return result ;
public class CmsPropertyAdvanced { /** * Performs the definition of a new property . < p > * @ return true , if the new property was created , otherwise false * @ throws CmsException if creation is not successful */ private boolean performDefineOperation ( ) throws CmsException { } }
boolean useTempfileProject = Boolean . valueOf ( getParamUsetempfileproject ( ) ) . booleanValue ( ) ; try { if ( useTempfileProject ) { switchToTempProject ( ) ; } String newProperty = getParamNewproperty ( ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( newProperty ) ) { getCms ( ) . createPropertyDefinition ( newProperty ) ; return true ; } else { throw new CmsException ( Messages . get ( ) . container ( Messages . ERR_INVALID_PROP_0 ) ) ; } } finally { if ( useTempfileProject ) { switchToCurrentProject ( ) ; } }
public class Timezone { /** * / * [ deutsch ] * < p > Liefert den anzuzeigenden Zeitzonennamen . < / p > * < p > Ist der Zeitzonenname nicht ermittelbar , wird die ID der Zeitzone * geliefert . < / p > * @ param style name style * @ param locale language setting * @ return localized timezone name for display purposes * @ see java . util . TimeZone # getDisplayName ( boolean , int , Locale ) * java . util . TimeZone . getDisplayName ( boolean , int , Locale ) * @ see Locale # getDefault ( ) * @ see # getID ( ) */ public String getDisplayName ( NameStyle style , Locale locale ) { } }
return getDisplayName ( this . getID ( ) , style , locale ) ;
public class SentStructureJsComponent { /** * Get the qualified name of all annotations belonging to relations having a * specific namespace . * @ param graph * The graph . * @ param namespace * The namespace of the relation ( not the annotation ) to search for . * If namespace is null all namespaces will be considered . * @ param type * Which type of relation to include * @ return */ private static Set < String > getRelationLevelSet ( SDocumentGraph graph , String namespace , Class < ? extends SRelation > type ) { } }
Set < String > result = new TreeSet < > ( ) ; if ( graph != null ) { List < ? extends SRelation > edges = null ; if ( type == SDominanceRelation . class ) { edges = graph . getDominanceRelations ( ) ; } else if ( type == SPointingRelation . class ) { edges = graph . getPointingRelations ( ) ; } else if ( type == SSpanningRelation . class ) { edges = graph . getSpanningRelations ( ) ; } if ( edges != null ) { for ( SRelation < ? , ? > edge : edges ) { Set < SLayer > layers = edge . getLayers ( ) ; for ( SLayer layer : layers ) { if ( namespace == null || namespace . equals ( layer . getName ( ) ) ) { for ( SAnnotation anno : edge . getAnnotations ( ) ) { result . add ( anno . getQName ( ) ) ; } // we got all annotations of this edge , jump to next edge break ; } // end if namespace equals layer name } // end for each layer } // end for each edge } } return result ;
public class InetAddressMatchInterfaceCriteria { /** * { @ inheritDoc } * @ return < code > getAddress ( ) < / code > if the < code > address < / code > is the same as the one returned by { @ link # getAddress ( ) } . */ @ Override protected InetAddress isAcceptable ( NetworkInterface networkInterface , InetAddress address ) throws SocketException { } }
try { InetAddress toMatch = getAddress ( ) ; // One time only warn against use of wildcard addresses if ( ! anyLocalLogged && toMatch . isAnyLocalAddress ( ) ) { MGMT_OP_LOGGER . invalidWildcardAddress ( this . address , INET_ADDRESS , ANY_ADDRESS ) ; anyLocalLogged = true ; } if ( toMatch . equals ( address ) ) { if ( toMatch instanceof Inet6Address ) { return matchIPv6 ( ( Inet6Address ) toMatch , ( Inet6Address ) address ) ; } return toMatch ; } } catch ( UnknownHostException e ) { // One time only log a warning if ( ! unknownHostLogged ) { MGMT_OP_LOGGER . cannotResolveAddress ( this . address ) ; unknownHostLogged = true ; } return null ; } return null ;
public class ServerRequestQueue { /** * < p > Determines whether the queue contains a session / app close request . < / p > * @ return A { @ link Boolean } value indicating whether or not the queue contains a * session close request . < i > True < / i > if the queue contains a close request , * < i > False < / i > if not . */ boolean containsClose ( ) { } }
synchronized ( reqQueueLockObject ) { for ( ServerRequest req : queue ) { if ( req != null && req . getRequestPath ( ) . equals ( Defines . RequestPath . RegisterClose . getPath ( ) ) ) { return true ; } } } return false ;
public class Detector { /** * < p > Estimates module size based on two finder patterns - - it uses * { @ link # sizeOfBlackWhiteBlackRunBothWays ( int , int , int , int ) } to figure the * width of each , measuring along the axis between their centers . < / p > */ private float calculateModuleSizeOneWay ( ResultPoint pattern , ResultPoint otherPattern ) { } }
float moduleSizeEst1 = sizeOfBlackWhiteBlackRunBothWays ( ( int ) pattern . getX ( ) , ( int ) pattern . getY ( ) , ( int ) otherPattern . getX ( ) , ( int ) otherPattern . getY ( ) ) ; float moduleSizeEst2 = sizeOfBlackWhiteBlackRunBothWays ( ( int ) otherPattern . getX ( ) , ( int ) otherPattern . getY ( ) , ( int ) pattern . getX ( ) , ( int ) pattern . getY ( ) ) ; if ( Float . isNaN ( moduleSizeEst1 ) ) { return moduleSizeEst2 / 7.0f ; } if ( Float . isNaN ( moduleSizeEst2 ) ) { return moduleSizeEst1 / 7.0f ; } // Average them , and divide by 7 since we ' ve counted the width of 3 black modules , // and 1 white and 1 black module on either side . Ergo , divide sum by 14. return ( moduleSizeEst1 + moduleSizeEst2 ) / 14.0f ;
public class StrSubstitutor { /** * Sets the variable suffix to use . * The variable suffix is the character or characters that identify the * end of a variable . This method allows a string suffix to be easily set . * @ param suffix the suffix for variables , not null * @ return this , to enable chaining * @ throws IllegalArgumentException if the suffix is null */ public StrSubstitutor setVariableSuffix ( final String suffix ) { } }
if ( suffix == null ) { throw new IllegalArgumentException ( "Variable suffix must not be null!" ) ; } return setVariableSuffixMatcher ( StrMatcher . stringMatcher ( suffix ) ) ;
public class EnumMap { /** * Returns all of the values comprising K . * The result is uncloned , cached , and shared by all callers . */ private static < K extends Enum < K > > K [ ] getKeyUniverse ( Class < K > keyType ) { } }
// Android - changed : Use JavaLangAccess directly instead of going through // SharedSecrets . return JavaLangAccess . getEnumConstantsShared ( keyType ) ;
public class Socks5 { public void buildConnection ( ) throws IOException { } }
if ( inetAddress == null ) { throw new IllegalArgumentException ( "Please set inetAddress before calling buildConnection." ) ; } if ( proxySocket == null ) { proxySocket = createProxySocket ( inetAddress , port ) ; } else if ( ! proxySocket . isConnected ( ) ) { proxySocket . connect ( new InetSocketAddress ( inetAddress , port ) ) ; } if ( SocksAuthenticationHelper . shouldAuthenticate ( proxySocket ) ) { SocksAuthenticationHelper . performUserPasswordAuthentication ( this ) ; }
public class FileSystemPartitionView {
    /**
     * Build partition constraints for the partition URI location.
     *
     * Each path segment of {@code relative} is matched, in order, against one
     * field partitioner of the dataset's partition strategy; a shorter path
     * selects a coarser (prefix) partition.
     *
     * @param view a {@code FileSystemPartitionView} containing {@code location}
     * @param relative a relative URI for a partition within the given view
     * @return a set of constraints that match the location
     */
    private static Constraints constraints(FileSystemPartitionView<?> view, @Nullable URI relative) {
        DatasetDescriptor descriptor = view.dataset.getDescriptor();
        if (relative == null) {
            // no partitions are selected, so no additional constraints
            return view.constraints;
        }
        Preconditions.checkArgument(descriptor.isPartitioned(),
            "Dataset is not partitioned");
        Constraints constraints = view.constraints;
        Schema schema = descriptor.getSchema();
        PartitionStrategy strategy = descriptor.getPartitionStrategy();
        PathConversion conversion = new PathConversion(schema);
        // Split the raw path into directory names, one per partition level.
        Iterator<String> parts = PATH_SPLITTER.split(relative.getRawPath()).iterator();
        for (FieldPartitioner<?, ?> fp :
            Accessor.getDefault().getFieldPartitioners(strategy)) {
            if (!parts.hasNext()) {
                // fewer segments than partitioners: prefix selection is allowed
                break;
            }
            // Add an equality constraint for this partition field's value,
            // decoded from the directory name.
            constraints = constraints.with(fp.getName(),
                conversion.valueForDirname(fp, parts.next()));
        }
        // Leftover segments mean the URI is deeper than the strategy permits.
        Preconditions.checkArgument(!parts.hasNext(),
            "%s is deeper than the partition strategy", relative);
        return constraints;
    }
}
public class BandwidthClient { /** * Helper method to build the GET request for the server . * @ param path the path . * @ param paramMap the parameters map . * @ return the get object . */ protected HttpGet generateGetRequest ( final String path , final Map < String , Object > paramMap ) { } }
final List < NameValuePair > pairs = new ArrayList < NameValuePair > ( ) ; for ( final String key : paramMap . keySet ( ) ) { pairs . add ( new BasicNameValuePair ( key , paramMap . get ( key ) . toString ( ) ) ) ; } final URI uri = buildUri ( path , pairs ) ; return new HttpGet ( uri ) ;
public class HerokuAPI { /** * Checks if maintenance mode is enabled for the given app * @ param appName See { @ link # listApps } for a list of apps that can be used . * @ return true if maintenance mode is enabled */ public boolean isMaintenanceModeEnabled ( String appName ) { } }
App app = connection . execute ( new AppInfo ( appName ) , apiKey ) ; return app . isMaintenance ( ) ;
public class HelixUtils { /** * Create a Helix cluster for the Gobblin Cluster application . * @ param zkConnectionString the ZooKeeper connection string * @ param clusterName the Helix cluster name * @ param overwrite true to overwrite exiting cluster , false to reuse existing cluster */ public static void createGobblinHelixCluster ( String zkConnectionString , String clusterName , boolean overwrite ) { } }
ClusterSetup clusterSetup = new ClusterSetup ( zkConnectionString ) ; // Create the cluster and overwrite if it already exists clusterSetup . addCluster ( clusterName , overwrite ) ; // Helix 0.6 . x requires a configuration property to have the form key = value . String autoJoinConfig = ZKHelixManager . ALLOW_PARTICIPANT_AUTO_JOIN + "=true" ; clusterSetup . setConfig ( HelixConfigScope . ConfigScopeProperty . CLUSTER , clusterName , autoJoinConfig ) ;
public class JobMaster { @ Override public CompletableFuture < Acknowledge > cancel ( Time timeout ) { } }
executionGraph . cancel ( ) ; return CompletableFuture . completedFuture ( Acknowledge . get ( ) ) ;
public class ImportHandler { /** * Resolve a class name . * @ param name The name of the class ( without package name ) to be resolved . * @ return If the class has been imported previously , with * { @ link # importClass } or { @ link # importPackage } , then its * Class instance . Otherwise < code > null < / code > . * @ throws ELException if the class is abstract or is an interface , or * not public . */ public Class < ? > resolveClass ( String name ) { } }
String className = classNameMap . get ( name ) ; if ( className != null ) { return resolveClassFor ( className ) ; } for ( String packageName : packages ) { String fullClassName = packageName + "." + name ; Class < ? > c = resolveClassFor ( fullClassName ) ; if ( c != null ) { classNameMap . put ( name , fullClassName ) ; return c ; } } return null ;
public class IIDImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setColor ( Integer newColor ) { } }
Integer oldColor = color ; color = newColor ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . IID__COLOR , oldColor , color ) ) ;
public class ScheduledDropwizardReporter {
    /**
     * Registers a newly added Flink metric with the Dropwizard registry,
     * wrapping it in the matching Flink-to-Dropwizard adapter. Metrics that
     * already wrap a native Dropwizard instance are registered unwrapped.
     * Unsupported metric types are logged and ignored.
     */
    @Override
    public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) {
        final String fullName = group.getMetricIdentifier(metricName, this);
        // Guard the wrapper maps and the registry against concurrent
        // add/remove notifications.
        synchronized (this) {
            if (metric instanceof Counter) {
                counters.put((Counter) metric, fullName);
                registry.register(fullName, new FlinkCounterWrapper((Counter) metric));
            }
            else if (metric instanceof Gauge) {
                gauges.put((Gauge<?>) metric, fullName);
                registry.register(fullName, FlinkGaugeWrapper.fromGauge((Gauge<?>) metric));
            } else if (metric instanceof Histogram) {
                Histogram histogram = (Histogram) metric;
                histograms.put(histogram, fullName);
                if (histogram instanceof DropwizardHistogramWrapper) {
                    // Already backed by a Dropwizard histogram: register it directly.
                    registry.register(fullName,
                        ((DropwizardHistogramWrapper) histogram).getDropwizardHistogram());
                } else {
                    registry.register(fullName, new FlinkHistogramWrapper(histogram));
                }
            } else if (metric instanceof Meter) {
                Meter meter = (Meter) metric;
                meters.put(meter, fullName);
                if (meter instanceof DropwizardMeterWrapper) {
                    // Already backed by a Dropwizard meter: register it directly.
                    registry.register(fullName,
                        ((DropwizardMeterWrapper) meter).getDropwizardMeter());
                } else {
                    registry.register(fullName, new FlinkMeterWrapper(meter));
                }
            } else {
                log.warn("Cannot add metric of type {}. This indicates that the reporter " +
                    "does not support this metric type.", metric.getClass().getName());
            }
        }
    }
}
public class ControlBean {
    /**
     * Returns the target control instance associated with this ControlBean, performing lazy
     * instantiation and initialization of the instance.
     *
     * REVIEW: could probably improve the granularity of locking here, but start w/ just
     * synchronizing the entire fn.
     *
     * @return the (possibly newly created) control implementation instance
     * @throws ControlException if the implementation class cannot be loaded,
     *         lacks the required annotation, or fails to initialize
     */
    public synchronized Object ensureControl() {
        if (_control == null) {
            // See if the property map specifies an implementation class for the control;
            // if not, use default binding.
            String implBinding = null;
            BaseProperties bp = _properties.getPropertySet(BaseProperties.class);
            if (bp != null)
                implBinding = bp.controlImplementation();
            else
                implBinding = ControlUtils.getDefaultControlBinding(_controlIntf);
            try {
                _implClass = _controlIntf.getClassLoader().loadClass(implBinding);
                // Validate that the specified implementation class has an @ControlImplementation
                // annotation, else downstream requirements (such as having a valid control init
                // class) will not be met.
                if (_implClass.getAnnotation(ControlImplementation.class) == null) {
                    throw new ControlException("@" + ControlImplementation.class.getName() +
                        " annotation is missing from control implementation class: " +
                        _implClass.getName());
                }
            } catch (ClassNotFoundException cnfe) {
                throw new ControlException("Unable to load control implementation: " + implBinding, cnfe);
            }
            // Cache the threading policy associated with the impl
            Threading thr = (Threading) _implClass.getAnnotation(Threading.class);
            if (thr != null)
                _threadingPolicy = thr.value();
            else
                _threadingPolicy = ThreadingPolicy.SINGLE_THREADED; // default to single-threaded
            ensureThreadingBehaviour();
            try {
                // Create and initialize the new instance
                _control = _implClass.newInstance();
                try {
                    /* Run the ImplInitializer. This class is code generated based on metadata
                       from a control implementation. If a Control implementation declares event
                       handlers for the ControlBeanContext or for the ResourceContext, executing
                       this code generated class will add the appropriate LifeCycle and/or
                       Resource event listeners. */
                    getImplInitializer().initialize(this, _control);
                    _hasServices = true;
                } catch (Exception e) {
                    throw new ControlException("Control initialization failure", e);
                }
                // Once the control is initialized, then allow the associated context
                // to do any initialization.
                ControlBeanContext cbcs = getBeanContextProxy();
                /* Implementation note: this call will run the LifeCycleListener(s) that
                   have been wired-up to the ControlBeanContext object associated with this
                   ControlBean. */
                cbcs.initializeControl();
            } catch (RuntimeException re) {
                // never mask RuntimeExceptions
                throw re;
            } catch (Exception e) {
                throw new ControlException("Unable to create control instance", e);
            }
        }
        // If the implementation instance does not currently have contextual services, they
        // are lazily restored here.
        if (!_hasServices) {
            getImplInitializer().initServices(this, _control);
            _hasServices = true;
        }
        return _control;
    }
}
public class StackedAreaRenderer2 {
    /**
     * Mostly copied from the base class.
     *
     * Draws one item of a stacked area chart. Rendering happens in two passes:
     * pass 0 fills the left/right half polygons between this category and the
     * previous one; pass 1 draws item labels (the only work done for column 0,
     * which has no previous category to connect to).
     */
    @Override
    public void drawItem(Graphics2D g2, CategoryItemRendererState state, Rectangle2D dataArea,
            CategoryPlot plot, CategoryAxis domainAxis, ValueAxis rangeAxis, CategoryDataset dataset,
            int row, int column, int pass) {
        // plot non-null values...
        Number dataValue = dataset.getValue(row, column);
        if (dataValue == null) {
            return;
        }
        double value = dataValue.doubleValue();
        // leave the y values (y1, y0) untranslated as it is going to be
        // stacked up later by previous series values, after this it will be
        // translated.
        double xx1 = domainAxis.getCategoryMiddle(column, getColumnCount(), dataArea,
            plot.getDomainAxisEdge());
        double previousHeightx1 = getPreviousHeight(dataset, row, column);
        double y1 = value + previousHeightx1;
        RectangleEdge location = plot.getRangeAxisEdge();
        double yy1 = rangeAxis.valueToJava2D(y1, dataArea, location);
        g2.setPaint(getItemPaint(row, column));
        g2.setStroke(getItemStroke(row, column));
        // add an item entity, if this information is being collected
        EntityCollection entities = state.getEntityCollection();
        // in column zero, the only job to do is draw any visible item labels
        // and this is done in the second pass...
        if (column == 0) {
            if (pass == 1) {
                // draw item labels, if visible
                if (isItemLabelVisible(row, column)) {
                    drawItemLabel(g2, plot.getOrientation(), dataset, row, column,
                        xx1, yy1, (y1 < 0.0));
                }
            }
        } else {
            Number previousValue = dataset.getValue(row, column - 1);
            if (previousValue != null) {
                double xx0 = domainAxis.getCategoryMiddle(column - 1, getColumnCount(),
                    dataArea, plot.getDomainAxisEdge());
                double y0 = previousValue.doubleValue();
                // Get the previous height, but this will be different for both
                // y0 and y1 as the previous series values could differ.
                double previousHeightx0 = getPreviousHeight(dataset, row, column - 1);
                // Now stack the current y values on top of the previous values.
                y0 += previousHeightx0;
                // Now translate the previous heights
                double previousHeightxx0 = rangeAxis.valueToJava2D(previousHeightx0,
                    dataArea, location);
                double previousHeightxx1 = rangeAxis.valueToJava2D(previousHeightx1,
                    dataArea, location);
                // Now translate the current y values.
                double yy0 = rangeAxis.valueToJava2D(y0, dataArea, location);
                if (pass == 0) {
                    // NOTE(review): in the midpoint expressions below, the (int)
                    // cast binds to the sum, so "/ 2" is integer division —
                    // preserved exactly from the original.
                    // left half
                    Polygon p = new Polygon();
                    p.addPoint((int) xx0, (int) yy0);
                    p.addPoint((int) (xx0 + xx1) / 2, (int) (yy0 + yy1) / 2);
                    p.addPoint((int) (xx0 + xx1) / 2,
                        (int) (previousHeightxx0 + previousHeightxx1) / 2);
                    p.addPoint((int) xx0, (int) previousHeightxx0);
                    g2.setPaint(getItemPaint(row, column - 1));
                    g2.setStroke(getItemStroke(row, column - 1));
                    g2.fill(p);
                    if (entities != null)
                        addItemEntity(entities, dataset, row, column - 1, p);
                    // right half
                    p = new Polygon();
                    p.addPoint((int) xx1, (int) yy1);
                    p.addPoint((int) (xx0 + xx1) / 2, (int) (yy0 + yy1) / 2);
                    p.addPoint((int) (xx0 + xx1) / 2,
                        (int) (previousHeightxx0 + previousHeightxx1) / 2);
                    p.addPoint((int) xx1, (int) previousHeightxx1);
                    g2.setPaint(getItemPaint(row, column));
                    g2.setStroke(getItemStroke(row, column));
                    g2.fill(p);
                    if (entities != null)
                        addItemEntity(entities, dataset, row, column, p);
                } else {
                    // second pass: labels only
                    if (isItemLabelVisible(row, column)) {
                        drawItemLabel(g2, plot.getOrientation(), dataset, row, column,
                            xx1, yy1, (y1 < 0.0));
                    }
                }
            }
        }
    }
}
public class DescribeDirectConnectGatewayAssociationsResult { /** * Information about the associations . * @ return Information about the associations . */ public java . util . List < DirectConnectGatewayAssociation > getDirectConnectGatewayAssociations ( ) { } }
if ( directConnectGatewayAssociations == null ) { directConnectGatewayAssociations = new com . amazonaws . internal . SdkInternalList < DirectConnectGatewayAssociation > ( ) ; } return directConnectGatewayAssociations ;
public class InMemoryMetricEmitter {
    /**
     * Filters snapshots by evenly selecting points across the interval.
     * Keeps roughly {@code numInstances} nodes spaced uniformly through the
     * list and removes all others in place via the iterator.
     *
     * @param selectedLists list of snapshots; trimmed in place
     */
    private void generalSelectMetricHistory(final LinkedList<InMemoryHistoryNode> selectedLists) {
        logger.debug("selecting snapshots evenly from across the time interval");
        if (selectedLists.size() > this.numInstances) {
            // Ideal spacing between kept samples.
            final double step = (double) selectedLists.size() / this.numInstances;
            long nextIndex = 0, currentIndex = 0, numSelectedInstances = 1;
            final Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
            while (ite.hasNext()) {
                ite.next();
                if (currentIndex == nextIndex) {
                    // Keep this node and compute the next index to keep,
                    // rounding k*step to the nearest integer.
                    nextIndex = (long) Math.floor(numSelectedInstances * step + 0.5);
                    numSelectedInstances++;
                } else {
                    // Not a selected index: drop the node.
                    ite.remove();
                }
                currentIndex++;
            }
        }
    }
}
public class MoveOnEventHandler {
    /**
     * Set the field or file that owns this listener.
     *
     * Besides recording the owner, this registers cleanup listeners on the
     * destination/source fields when they belong to other records, and may
     * perform an initial move depending on the owner's current edit mode.
     *
     * @param owner My owner.
     */
    public void setOwner(ListenerOwner owner) {
        super.setOwner(owner);
        if (this.getOwner() == null)
            return;
        if (m_fldDest.getRecord() != this.getOwner())   // If field is not in this file, remember to remove it
            m_fldDest.addListener(new FieldRemoveBOnCloseHandler(this));
        if (m_fldSource != null)
            if (m_fldSource.getRecord() != this.getOwner())
                if (m_fldSource.getRecord() != m_fldDest.getRecord())
                    // Source lives in yet another record: register cleanup there too.
                    m_fldSource.addListener(new FieldRemoveBOnCloseHandler(this));
        // Editing an existing record: move now if configured to move on valid.
        if ((this.getOwner().getEditMode() == DBConstants.EDIT_CURRENT)
                || (this.getOwner().getEditMode() == DBConstants.EDIT_IN_PROGRESS))
            if (m_bMoveOnValid)
                this.moveTheData(DBConstants.DISPLAY, DBConstants.INIT_MOVE);   // Do trigger a record change.
        // Adding a new record: move now if configured to move on new.
        if (this.getOwner().getEditMode() == DBConstants.EDIT_ADD)
            if (m_bMoveOnNew)
                this.moveTheData(DBConstants.DISPLAY, DBConstants.INIT_MOVE);   // Do trigger a record change.
    }
}
public class MessageDataDesc { /** * Initialize new BaseTrxMessage . * @ param objRawMessage The ( optional ) raw data of the message . */ public void init ( MessageDataParent messageDataParent , String strKey ) { } }
m_messageDataParent = messageDataParent ; m_strKey = strKey ; if ( messageDataParent != null ) messageDataParent . addMessageDataDesc ( this ) ;
public class Application { /** * Display this URL in a web browser . * Uses the applet or jnlp context . * @ param strURL The local URL to display ( not fully qualified ) . * @ param iOptions ThinMenuConstants . HELP _ WINDOW _ CHANGE If help pane is already displayed , change to this content . * @ param The applet ( optional ) . * @ return True if successfully displayed . */ public boolean showTheDocument ( String strURL , BaseAppletReference applet , int iOptions ) { } }
if ( applet != null ) return applet . showTheDocument ( this , strURL , iOptions ) ; return false ; // Override this
public class PatchRuleGroup { /** * The rules that make up the rule group . * @ return The rules that make up the rule group . */ public java . util . List < PatchRule > getPatchRules ( ) { } }
if ( patchRules == null ) { patchRules = new com . amazonaws . internal . SdkInternalList < PatchRule > ( ) ; } return patchRules ;
public class HistoricJobLogManager { /** * update / / / / / */ public void addRemovalTimeToJobLogByRootProcessInstanceId ( String rootProcessInstanceId , Date removalTime ) { } }
Map < String , Object > parameters = new HashMap < > ( ) ; parameters . put ( "rootProcessInstanceId" , rootProcessInstanceId ) ; parameters . put ( "removalTime" , removalTime ) ; getDbEntityManager ( ) . updatePreserveOrder ( HistoricJobLogEventEntity . class , "updateJobLogByRootProcessInstanceId" , parameters ) ;
public class Cache {
    /**
     * Finds all matching sets or something that causes the lookup to stop.
     *
     * Walks the name from the full label count down to the root, checking at
     * each level for (in order): the requested type (or everything for ANY),
     * a CNAME, a DNAME at ancestor names, an NS delegation, and a cached
     * negative (NXDOMAIN) entry.
     *
     * @param name the name to look up
     * @param type the record type (Type.ANY returns all credible sets)
     * @param minCred minimum credibility a cached element must have
     * @return the lookup outcome; UNKNOWN when nothing relevant is cached
     */
    protected synchronized SetResponse lookup(Name name, int type, int minCred) {
        int labels;
        int tlabels;
        Element element;
        Name tname;
        Object types;
        SetResponse sr;
        labels = name.labels();
        for (tlabels = labels; tlabels >= 1; tlabels--) {
            boolean isRoot = (tlabels == 1);
            boolean isExact = (tlabels == labels);
            if (isRoot)
                tname = Name.root;
            else if (isExact)
                tname = name;
            else
                tname = new Name(name, labels - tlabels);
            types = data.get(tname);
            if (types == null)
                continue;
            /*
             * If this is the name, look for the actual type or a CNAME
             * (unless it's an ANY query, where we return everything).
             * Otherwise, look for a DNAME.
             */
            if (isExact && type == Type.ANY) {
                sr = new SetResponse(SetResponse.SUCCESSFUL);
                Element[] elements = allElements(types);
                int added = 0;
                for (int i = 0; i < elements.length; i++) {
                    element = elements[i];
                    if (element.expired()) {
                        // Evict stale entries as they are encountered.
                        removeElement(tname, element.getType());
                        continue;
                    }
                    if (!(element instanceof CacheRRset))
                        continue;
                    if (element.compareCredibility(minCred) < 0)
                        continue;
                    sr.addRRset((CacheRRset) element);
                    added++;
                }
                /* There were positive entries */
                if (added > 0)
                    return sr;
            } else if (isExact) {
                element = oneElement(tname, types, type, minCred);
                if (element != null && element instanceof CacheRRset) {
                    sr = new SetResponse(SetResponse.SUCCESSFUL);
                    sr.addRRset((CacheRRset) element);
                    return sr;
                } else if (element != null) {
                    // A cached negative entry for this exact type.
                    sr = new SetResponse(SetResponse.NXRRSET);
                    return sr;
                }
                // No direct answer: fall back to a CNAME at this name.
                element = oneElement(tname, types, Type.CNAME, minCred);
                if (element != null && element instanceof CacheRRset) {
                    return new SetResponse(SetResponse.CNAME, (CacheRRset) element);
                }
            } else {
                // At an ancestor of the queried name, only a DNAME applies.
                element = oneElement(tname, types, Type.DNAME, minCred);
                if (element != null && element instanceof CacheRRset) {
                    return new SetResponse(SetResponse.DNAME, (CacheRRset) element);
                }
            }
            /* Look for an NS */
            element = oneElement(tname, types, Type.NS, minCred);
            if (element != null && element instanceof CacheRRset)
                return new SetResponse(SetResponse.DELEGATION, (CacheRRset) element);
            /* Check for the special NXDOMAIN element. */
            if (isExact) {
                element = oneElement(tname, types, 0, minCred);
                if (element != null)
                    return SetResponse.ofType(SetResponse.NXDOMAIN);
            }
        }
        return SetResponse.ofType(SetResponse.UNKNOWN);
    }
}
public class SyncAgentsInner {
    /**
     * Lists databases linked to a sync agent.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server on which the sync agent is hosted.
     * @param syncAgentName The name of the sync agent.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<SyncAgentLinkedDatabaseInner>> listLinkedDatabasesAsync(final String resourceGroupName, final String serverName, final String syncAgentName, final ListOperationCallback<SyncAgentLinkedDatabaseInner> serviceCallback) {
        // Fetch the first page, then let the ServiceFuture pull subsequent
        // pages through the next-link function below.
        return AzureServiceFuture.fromPageResponse(
            listLinkedDatabasesSinglePageAsync(resourceGroupName, serverName, syncAgentName),
            new Func1<String, Observable<ServiceResponse<Page<SyncAgentLinkedDatabaseInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<SyncAgentLinkedDatabaseInner>>> call(String nextPageLink) {
                    // Resolve one follow-up page by its next-link token.
                    return listLinkedDatabasesNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
}
public class ResolverConfig { /** * Looks in the system properties to find servers and a search path . * Servers are defined by dns . server = server1 , server2 . . . * The search path is defined by dns . search = domain1 , domain2 . . . */ private boolean findProperty ( ) { } }
String prop ; List lserver = new ArrayList ( 0 ) ; List lsearch = new ArrayList ( 0 ) ; StringTokenizer st ; prop = System . getProperty ( "dns.server" ) ; if ( prop != null ) { st = new StringTokenizer ( prop , "," ) ; while ( st . hasMoreTokens ( ) ) addServer ( st . nextToken ( ) , lserver ) ; } prop = System . getProperty ( "dns.search" ) ; if ( prop != null ) { st = new StringTokenizer ( prop , "," ) ; while ( st . hasMoreTokens ( ) ) addSearch ( st . nextToken ( ) , lsearch ) ; } configureFromLists ( lserver , lsearch ) ; return ( servers != null && searchlist != null ) ;
public class Types { /** * < editor - fold defaultstate = " collapsed " desc = " Greatest lower bound " > */ public Type glb ( List < Type > ts ) { } }
Type t1 = ts . head ; for ( Type t2 : ts . tail ) { if ( t1 . isErroneous ( ) ) return t1 ; t1 = glb ( t1 , t2 ) ; } return t1 ;
public class Async {
    /**
     * Invokes the specified function asynchronously and returns an Observable that emits the result.
     *
     * Note: The function is called immediately and once, not whenever an observer subscribes to the
     * resulting Observable. Multiple subscriptions to this Observable observe the same return value.
     *
     * <img width="640" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/start.png" alt="">
     *
     * @param <R> the result value type
     * @param func function to run asynchronously
     * @return an Observable that emits the function's result value, or notifies observers of an exception
     * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Async-Operators#wiki-start">RxJava Wiki: start()</a>
     * @see <a href="http://msdn.microsoft.com/en-us/library/hh229036.aspx">MSDN: Observable.Start</a>
     */
    public static <R> Observable<R> startCallable(Callable<? extends R> func) {
        // Delegate to the scheduler-accepting overload on the computation scheduler.
        return startCallable(func, Schedulers.computation());
    }
}
public class P1_QueryOp { /** * 批量关联 , 要求批量操作的都是相同的类 */ private < T > void postHandleRelatedColumn ( List < T > tList , String ... relatedColumnProperties ) { } }
if ( tList == null || tList . isEmpty ( ) ) { return ; } JoinTable joinTable = DOInfoReader . getJoinTable ( tList . get ( 0 ) . getClass ( ) ) ; if ( joinTable != null ) { // 处理join的方式 List < Object > list1 = new ArrayList < Object > ( ) ; List < Object > list2 = new ArrayList < Object > ( ) ; Field joinLeftTableFiled = DOInfoReader . getJoinLeftTable ( tList . get ( 0 ) . getClass ( ) ) ; Field joinRightTableFiled = DOInfoReader . getJoinRightTable ( tList . get ( 0 ) . getClass ( ) ) ; for ( T t : tList ) { Object obj1 = DOInfoReader . getValue ( joinLeftTableFiled , t ) ; if ( obj1 != null ) { list1 . add ( obj1 ) ; } Object obj2 = DOInfoReader . getValue ( joinRightTableFiled , t ) ; if ( obj2 != null ) { list2 . add ( obj2 ) ; } } postHandleRelatedColumn ( list1 ) ; postHandleRelatedColumn ( list2 ) ; return ; } SQLAssert . allSameClass ( tList ) ; Class < ? > clazz = tList . get ( 0 ) . getClass ( ) ; List < Field > relatedColumns = DOInfoReader . getRelatedColumns ( clazz ) ; for ( Field field : relatedColumns ) { // 只处理指定的field if ( relatedColumnProperties != null && relatedColumnProperties . length > 0 ) { boolean isContain = false ; for ( String property : relatedColumnProperties ) { if ( property != null && property . equals ( field . getName ( ) ) ) { isContain = true ; break ; } } if ( ! isContain ) { continue ; } } RelatedColumn column = field . getAnnotation ( RelatedColumn . class ) ; if ( column . localColumn ( ) . trim ( ) . isEmpty ( ) ) { LOGGER . warn ( "relatedColumn value is empty, field:{}" , field ) ; continue ; } if ( column . remoteColumn ( ) . trim ( ) . isEmpty ( ) ) { LOGGER . warn ( "remoteColumn value is empty, field:{}" , field ) ; continue ; } Field localField = DOInfoReader . getFieldByDBField ( clazz , column . localColumn ( ) ) ; if ( localField == null ) { LOGGER . error ( "cannot find localField,db column name:{}" , column . localColumn ( ) ) ; continue ; } // 批量查询数据库 , 提高效率的关键 Class < ? > remoteDOClass ; if ( field . 
getType ( ) == List . class ) { remoteDOClass = DOInfoReader . getGenericFieldType ( field ) ; } else { remoteDOClass = field . getType ( ) ; } Field remoteField = DOInfoReader . getFieldByDBField ( remoteDOClass , column . remoteColumn ( ) ) ; if ( remoteField == null ) { LOGGER . error ( "cannot find remoteField,db column name:{}" , column . remoteColumn ( ) ) ; continue ; } Set < Object > values = new HashSet < Object > ( ) ; // 用于去重 for ( T t : tList ) { Object value = DOInfoReader . getValue ( localField , t ) ; if ( value != null ) { values . add ( value ) ; } } if ( values . isEmpty ( ) ) { // 不需要查询数据库 , 但是对List的 , 设置空List , 确保list不会是null if ( field . getType ( ) == List . class ) { for ( T t : tList ) { DOInfoReader . setValue ( field , t , new ArrayList < Object > ( ) ) ; } } continue ; } List < ? > relateValues ; if ( column . dataService ( ) != void . class && IDBHelperDataService . class . isAssignableFrom ( column . dataService ( ) ) ) { IDBHelperDataService dataService = ( IDBHelperDataService ) applicationContext . getBean ( column . dataService ( ) ) ; if ( dataService == null ) { LOGGER . error ( "dataService is null for {}" , column . dataService ( ) ) ; relateValues = new ArrayList < Object > ( ) ; } else { relateValues = dataService . get ( new ArrayList < Object > ( values ) , clazz , column . localColumn ( ) , remoteDOClass , column . remoteColumn ( ) ) ; } } else { String inExpr = column . remoteColumn ( ) + " in (?)" ; if ( column . extraWhere ( ) == null || column . extraWhere ( ) . trim ( ) . isEmpty ( ) ) { relateValues = getAll ( remoteDOClass , "where " + inExpr , values ) ; } else { String where ; try { where = SQLUtils . insertWhereAndExpression ( column . extraWhere ( ) , inExpr ) ; relateValues = getAll ( remoteDOClass , where , values ) ; } catch ( JSQLParserException e ) { LOGGER . error ( "wrong RelatedColumn extraWhere:{}, ignore extraWhere" , column . 
extraWhere ( ) ) ; relateValues = getAll ( remoteDOClass , "where " + inExpr , values ) ; } } } if ( field . getType ( ) == List . class ) { Map < Object , List < Object > > mapRemoteValues = new HashMap < Object , List < Object > > ( ) ; Map < String , List < Object > > mapRemoteValuesString = new HashMap < String , List < Object > > ( ) ; for ( Object obj : relateValues ) { Object oRemoteValue = DOInfoReader . getValue ( remoteField , obj ) ; if ( oRemoteValue == null ) { continue ; } List < Object > oRemoteValueList = mapRemoteValues . get ( oRemoteValue ) ; if ( oRemoteValueList == null ) { oRemoteValueList = new ArrayList < Object > ( ) ; mapRemoteValues . put ( oRemoteValue , oRemoteValueList ) ; } oRemoteValueList . add ( obj ) ; List < Object > oRemoteValueListString = mapRemoteValuesString . get ( oRemoteValue . toString ( ) ) ; if ( oRemoteValueListString == null ) { oRemoteValueListString = new ArrayList < Object > ( ) ; mapRemoteValuesString . put ( oRemoteValue . toString ( ) , oRemoteValueListString ) ; } oRemoteValueListString . add ( obj ) ; } for ( T t : tList ) { List < Object > valueList = new ArrayList < Object > ( ) ; Object oLocalValue = DOInfoReader . getValue ( localField , t ) ; if ( oLocalValue != null ) { List < Object > objRemoteList = mapRemoteValues . get ( oLocalValue ) ; if ( objRemoteList != null ) { valueList = objRemoteList ; } else { List < Object > objRemoteStringList = mapRemoteValuesString . get ( oLocalValue . toString ( ) ) ; if ( objRemoteStringList != null ) { LOGGER . warn ( "@RelatedColumn fields local:{},remote:{} is different classes. Use String compare." , localField , remoteField ) ; valueList = objRemoteList ; } } } if ( valueList . isEmpty ( ) ) { // 没有匹配数据时 , 当原字段有值 , 则不修改原来的值 if ( DOInfoReader . getValue ( field , t ) == null ) { DOInfoReader . setValue ( field , t , valueList ) ; } } else { DOInfoReader . 
setValue ( field , t , valueList ) ; } } } else { Map < Object , Object > mapRemoteValues = new HashMap < Object , Object > ( ) ; Map < String , Object > mapRemoteValuesString = new HashMap < String , Object > ( ) ; for ( Object obj : relateValues ) { Object oRemoteValue = DOInfoReader . getValue ( remoteField , obj ) ; if ( oRemoteValue != null ) { mapRemoteValues . put ( oRemoteValue , obj ) ; mapRemoteValuesString . put ( oRemoteValue . toString ( ) , obj ) ; } } for ( T t : tList ) { Object oLocalValue = DOInfoReader . getValue ( localField , t ) ; if ( oLocalValue == null ) { continue ; } Object objRemote = mapRemoteValues . get ( oLocalValue ) ; if ( objRemote != null ) { DOInfoReader . setValue ( field , t , objRemote ) ; continue ; } Object objRemoteString = mapRemoteValuesString . get ( oLocalValue . toString ( ) ) ; if ( objRemoteString != null ) { LOGGER . warn ( "@RelatedColumn fields local:{},remote:{} is different classes. Use String compare." , localField , remoteField ) ; DOInfoReader . setValue ( field , t , objRemoteString ) ; } } } }
public class AmazonLightsailClient { /** * Deletes the specified disk snapshot . * When you make periodic snapshots of a disk , the snapshots are incremental , and only the blocks on the device that * have changed since your last snapshot are saved in the new snapshot . When you delete a snapshot , only the data * not needed for any other snapshot is removed . So regardless of which prior snapshots have been deleted , all * active snapshots will have access to all the information needed to restore the disk . * The < code > delete disk snapshot < / code > operation supports tag - based access control via resource tags applied to * the resource identified by diskSnapshotName . For more information , see the < a * href = " https : / / lightsail . aws . amazon . com / ls / docs / en / articles / amazon - lightsail - controlling - access - using - tags " * > Lightsail Dev Guide < / a > . * @ param deleteDiskSnapshotRequest * @ return Result of the DeleteDiskSnapshot operation returned by the service . * @ throws ServiceException * A general service exception . * @ throws InvalidInputException * Lightsail throws this exception when user input does not conform to the validation rules of an input * field . < / p > < note > * Domain - related APIs are only available in the N . Virginia ( us - east - 1 ) Region . Please set your AWS Region * configuration to us - east - 1 to create , view , or edit these resources . * @ throws NotFoundException * Lightsail throws this exception when it cannot find a resource . * @ throws OperationFailureException * Lightsail throws this exception when an operation fails to execute . * @ throws AccessDeniedException * Lightsail throws this exception when the user cannot be authenticated or uses invalid credentials to * access a resource . * @ throws AccountSetupInProgressException * Lightsail throws this exception when an account is still in the setup in progress state . 
* @ throws UnauthenticatedException * Lightsail throws this exception when the user has not been authenticated . * @ sample AmazonLightsail . DeleteDiskSnapshot * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / lightsail - 2016-11-28 / DeleteDiskSnapshot " target = " _ top " > AWS * API Documentation < / a > */ @ Override public DeleteDiskSnapshotResult deleteDiskSnapshot ( DeleteDiskSnapshotRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteDiskSnapshot ( request ) ;
public class SipHasherStream { /** * Updates the hash with a single byte . * This will only modify the internal ` m ` value , nothing will be modified * in the actual ` v * ` states until an 8 - byte block has been provided . * @ param b * the byte being added to the digest . * @ return * the same { @ link SipHasherStream } for chaining . */ public final SipHasherStream update ( byte b ) { } }
this . len ++ ; this . m |= ( ( ( long ) b & 0xff ) << ( this . m_idx ++ * 8 ) ) ; if ( this . m_idx < 8 ) { return this ; } this . v3 ^= this . m ; for ( int i = 0 ; i < this . c ; i ++ ) { round ( ) ; } this . v0 ^= this . m ; this . m_idx = 0 ; this . m = 0 ; return this ;
public class EJSHome {
    /**
     * Activates the entity bean identified by beanId within the given
     * transaction and returns the wrapper set used to expose it to clients.
     * If the bean instance no longer exists, a wrapper is still returned
     * rather than failing (see the NoSuchObjectException handling below).
     *
     * d146034.6
     *
     * @param beanId identity of the bean to activate
     * @param currentTx transaction under which activation is performed
     * @return the wrapper common for the activated bean
     * @throws CSIException if container services fail during activation
     * @throws RemoteException if a remote failure occurs
     */
    public EJSWrapperCommon activateBean(BeanId beanId, ContainerTx currentTx) throws CSIException, RemoteException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); // d532639.2
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "activateBean (" + beanId + ")", currentTx);

        // Fail fast if this home has been disabled.
        homeEnabled();

        EJSWrapperCommon result = null;
        try {
            // For Single-object finder methods (other than findByPrimaryKey),
            // the Persistence Manager is required to perform a flush of the
            // Entity beans prior to the query. If this flush was not
            // performed, then throw an exception indicating the tx is in an
            // illegal state.
            if (currentTx.ivFlushRequired) {
                IllegalStateException isex = new IllegalStateException("Persistence Manager failed to perform synchronization " + "of Entity beans prior to find<METHOD>");
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(tc, "activateBean", isex);
                throw isex;
            }

            // Look to see if this beanId is already cached with its serialized
            // byte array to improve performance. This also causes the beanId
            // in the wrapper to == the one in the BeanO.
            beanId = wrapperManager.beanIdCache.find(beanId);

            container.activator.activateBean(EJSContainer.getThreadData(), currentTx, beanId); // d630940
            result = wrapperManager.getWrapper(beanId); // d156807.1
        } catch (NoSuchObjectException ex) {
            // FFDC probe id "998" identifies this catch point in diagnostics.
            FFDCFilter.processException(ex, CLASS_NAME + ".activateBean", "998", this);
            if (isTraceOn && tc.isDebugEnabled())
                Tr.debug(tc, "activateBean : NoSuchObjectException", ex);

            // Something very odd has happened. The collection does not care
            // that the object doesn't exist, nor should it have to deal with
            // it, so just create a wrapper instead of activating.....
            result = getWrapper(beanId);
        }

        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "activateBean", result);
        return result;
    }
}
public class WFG1 { /** * WFG1 t3 transformation * @ throws org . uma . jmetal . util . JMetalException */ public float [ ] t3 ( float [ ] z ) throws JMetalException { } }
float [ ] result = new float [ z . length ] ; for ( int i = 0 ; i < z . length ; i ++ ) { result [ i ] = ( new Transformations ( ) ) . bPoly ( z [ i ] , ( float ) 0.02 ) ; } return result ;
public class JXMultiSplitPanePanel { /** * Factory method for creating the new { @ link JXMultiSplitPane } . This method is invoked in the * constructor from the derived classes and can be overridden so users can provide their own * version of a { @ link JXMultiSplitPane } * @ param layoutDefinition * the layout definition * @ return the new { @ link JXMultiSplitPane } */ protected JXMultiSplitPane newJXMultiSplitPane ( String layoutDefinition ) { } }
JXMultiSplitPane msp = new JXMultiSplitPane ( ) ; MultiSplitLayout . Node modelRoot = newRootNode ( layoutDefinition ) ; msp . getMultiSplitLayout ( ) . setModel ( modelRoot ) ; msp . setBorder ( BorderFactory . createEmptyBorder ( 4 , 4 , 4 , 4 ) ) ; return msp ;
public class Import { /** * Adds a set of ` DataPoint ` s to a series . * @ param series The series the DataPoints should be added to . If the series doesn ' t exist , * it will be created . * @ param dataPoints Data to be added . */ public void addDataPointSet ( String series , Set < DataPoint > dataPoints ) { } }
DataSet set = store . get ( series ) ; if ( set == null ) { set = new DataSet ( dataPoints ) ; store . put ( series , set ) ; } else { set . addAll ( dataPoints ) ; }
public class Article { /** * Removes a single media content item from the list * @ param mediaContent The media content object to remove */ public Article removeMediaContent ( MediaContent mediaContent ) { } }
if ( mediaContent == null || this . mediaContentVec == null ) return this ; this . mediaContentVec . remove ( mediaContent ) ; return this ;
public class AttributeValue { /** * An attribute of type String Set . For example : * < code > " SS " : [ " Giraffe " , " Hippo " , " Zebra " ] < / code > * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSS ( java . util . Collection ) } or { @ link # withSS ( java . util . Collection ) } if you want to override the * existing values . * @ param sS * An attribute of type String Set . For example : < / p > * < code > " SS " : [ " Giraffe " , " Hippo " , " Zebra " ] < / code > * @ return Returns a reference to this object so that method calls can be chained together . */ public AttributeValue withSS ( String ... sS ) { } }
if ( this . sS == null ) { setSS ( new java . util . ArrayList < String > ( sS . length ) ) ; } for ( String ele : sS ) { this . sS . add ( ele ) ; } return this ;
public class VersionList { /** * Removes all snapshots from this list */ public void removeSnapshots ( ) { } }
VersionList versionToRemove = new VersionList ( ) ; // collect Versions to be removed for ( Version ver : this ) { if ( ver . isSnapshot ( ) ) { versionToRemove . add ( ver ) ; } } // remove them this . removeAll ( versionToRemove ) ;
public class XmlOutputGenerator { /** * Returns the sorted xml as an OutputStream . * @ return the sorted xml */ public String getSortedXml ( Document newDocument ) { } }
try ( ByteArrayOutputStream sortedXml = new ByteArrayOutputStream ( ) ) { BufferedLineSeparatorOutputStream bufferedLineOutputStream = new BufferedLineSeparatorOutputStream ( lineSeparatorUtil . toString ( ) , sortedXml ) ; XMLOutputter xmlOutputter = new PatchedXMLOutputter ( bufferedLineOutputStream , indentBlankLines ) ; xmlOutputter . setFormat ( createPrettyFormat ( ) ) ; xmlOutputter . output ( newDocument , bufferedLineOutputStream ) ; bufferedLineOutputStream . close ( ) ; return sortedXml . toString ( encoding ) ; } catch ( IOException ioex ) { throw new FailureException ( "Could not format pom files content" , ioex ) ; }
public class WField { /** * Sets the desired width of the input field , as a percentage of the available space . * @ param inputWidth the percentage width , or & lt ; = 0 to use the default field width . */ public void setInputWidth ( final int inputWidth ) { } }
if ( inputWidth > 100 ) { throw new IllegalArgumentException ( "inputWidth (" + inputWidth + ") cannot be greater than 100 percent." ) ; } getOrCreateComponentModel ( ) . inputWidth = Math . max ( 0 , inputWidth ) ;
public class UpdateReconciliationLineItemReports {
    /**
     * Runs the example: retrieves one reconciliation line item report, sets a
     * manual billing volume on it, and submits the update.
     *
     * @param adManagerServices the services factory.
     * @param session the session.
     * @param reconciliationLineItemReportId the ID of the reconciliation line item report to
     *        retrieve.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     */
    public static void runExample(AdManagerServices adManagerServices, AdManagerSession session,
            long reconciliationLineItemReportId) throws RemoteException {
        // Obtain the ReconciliationLineItemReportService.
        ReconciliationLineItemReportServiceInterface reportService =
            adManagerServices.get(session, ReconciliationLineItemReportServiceInterface.class);

        // Select exactly one reconciliation line item report by its ID.
        StatementBuilder query = new StatementBuilder()
            .where("id = :lineItemReportId")
            .orderBy("id ASC")
            .limit(1)
            .withBindVariableValue("lineItemReportId", reconciliationLineItemReportId);

        ReconciliationLineItemReportPage reportPage =
            reportService.getReconciliationLineItemReportsByStatement(query.toStatement());
        ReconciliationLineItemReport report =
            Iterables.getOnlyElement(Arrays.asList(reportPage.getResults()));

        // Bill from a manual volume that splits the difference between the
        // Ad Manager volume and the third-party volume.
        report.setManualVolume((report.getDfpVolume() + report.getThirdPartyVolume()) / 2);
        report.setReconciliationSource(BillFrom.MANUAL);

        ReconciliationLineItemReport[] updatedReports =
            reportService.updateReconciliationLineItemReports(
                new ReconciliationLineItemReport[] {report});

        for (ReconciliationLineItemReport updated : updatedReports) {
            System.out.printf(
                "Reconciliation line item report with ID %d for line item ID %d was "
                    + "updated, with manual volume %d.%n",
                updated.getId(), updated.getLineItemId(), updated.getManualVolume());
        }
    }
}
public class Event { /** * Sets the channels that the event is fired on if no channels * are specified explicitly when firing the event * ( see { @ link org . jgrapes . core . Manager # fire ( Event , Channel . . . ) } ) . * @ param channels the channels to set * @ return the object for easy chaining * @ throws IllegalStateException if the method is called after * this event has been fired */ public Event < T > setChannels ( Channel ... channels ) { } }
if ( enqueued ( ) ) { throw new IllegalStateException ( "Channels cannot be changed after fire" ) ; } this . channels = Arrays . copyOf ( channels , channels . length ) ; return this ;
public class AbstractDestinationHandler {
    /**
     * Registers an alias destination that targets this destination, so this
     * destination can later find the aliases that point at it.
     *
     * (non-Javadoc)
     * @see com.ibm.ws.sib.processor.impl.interfaces.DestinationHandler#addTargettingAlias(com.ibm.ws.sib.processor.impl.AliasDestinationHandler)
     */
    public void addTargettingAlias(DestinationHandler aliasDestinationHandler) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "addTargettingAlias", aliasDestinationHandler);

        // NOTE(review): this lazy init is an unsynchronized check-then-act;
        // two concurrent callers could each create a list and one registration
        // could be lost. The add below IS synchronized, which suggests
        // concurrent use is expected -- confirm how this field is guarded at
        // its other use sites before relying on this initialization.
        if (aliasesThatTargetThisDest == null) {
            aliasesThatTargetThisDest = new java.util.ArrayList<DestinationHandler>();
        }
        // Guard the mutation against concurrent readers/writers of the list.
        synchronized (aliasesThatTargetThisDest) {
            aliasesThatTargetThisDest.add(aliasDestinationHandler);
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "addTargettingAlias");
    }
}
public class LObjBoolPredicateBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T > LObjBoolPredicate < T > objBoolPredicateFrom ( Consumer < LObjBoolPredicateBuilder < T > > buildingFunction ) { } }
LObjBoolPredicateBuilder builder = new LObjBoolPredicateBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class AnnotationRef {
    /**
     * Writes the given annotation to the visitor.
     *
     * Emits the annotation header (type descriptor plus runtime visibility) on
     * the class visitor, then delegates to doWrite to serialize the values held
     * by {@code instance} into the returned annotation visitor.
     */
    public void write(T instance, ClassVisitor visitor) {
        doWrite(instance, visitor.visitAnnotation(typeDescriptor, isRuntimeVisible));
    }
}