signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class AggregationDeserializer { /** * Checks the next { @ link JsonToken } to decide the next appropriate parsing method .
* @ param in { @ link JsonReader } object used for parsing
* @ param objMap Map used to build the structure for the resulting { @ link QueryAggregation } object
* @ throws IOException signals that there has been an IO exception */
private void parseNext ( JsonReader in , HashMap < String , Object > objMap ) throws IOException { } } | JsonToken token = in . peek ( ) ; String lastName = "" ; if ( token == JsonToken . NAME ) { lastName = in . nextName ( ) ; token = in . peek ( ) ; } switch ( token ) { case BEGIN_ARRAY : parseArray ( in , objMap , lastName ) ; break ; case BEGIN_OBJECT : parseObject ( in , objMap , lastName ) ; break ; case STRING : objMap . put ( lastName , in . nextString ( ) ) ; break ; case NUMBER : objMap . put ( lastName , in . nextDouble ( ) ) ; break ; case BOOLEAN : objMap . put ( lastName , in . nextBoolean ( ) ) ; break ; default : throw new IOException ( "Unexpected JSON token encountered" ) ; } collapseMap ( objMap ) ; |
public class TRow { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */
public boolean isSet ( _Fields field ) { } } | if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case COL_VALS : return isSetColVals ( ) ; } throw new IllegalStateException ( ) ; |
public class ModifyDocumentPermissionRequest { /** * The AWS user accounts that should no longer have access to the document . The AWS user account can either be a
* group of account IDs or < i > All < / i > . This action has a higher priority than < i > AccountIdsToAdd < / i > . If you specify
* an account ID to add and the same ID to remove , the system removes access to the document .
* @ param accountIdsToRemove
* The AWS user accounts that should no longer have access to the document . The AWS user account can either
* be a group of account IDs or < i > All < / i > . This action has a higher priority than < i > AccountIdsToAdd < / i > . If
* you specify an account ID to add and the same ID to remove , the system removes access to the document . */
public void setAccountIdsToRemove ( java . util . Collection < String > accountIdsToRemove ) { } } | if ( accountIdsToRemove == null ) { this . accountIdsToRemove = null ; return ; } this . accountIdsToRemove = new com . amazonaws . internal . SdkInternalList < String > ( accountIdsToRemove ) ; |
public class XmlPrintStream { /** * Output a complete element with the given content .
* @ param elementName Name of element .
* @ param value Content of element . */
public void printElement ( String elementName , Object value ) { } } | println ( "<" + elementName + ">" + ( value == null ? "" : escape ( value . toString ( ) ) ) + "</" + elementName + ">" ) ; |
public class ExtensionFilter { /** * Function to filter files based on defined rules . */
@ Override public boolean accept ( File file ) { } } | // All directories are added in the list that can be read by the Application
if ( file . isDirectory ( ) && file . canRead ( ) ) { return true ; } else if ( properties . selection_type == DialogConfigs . DIR_SELECT ) { /* True for files , If the selection type is Directory type , ie .
* Only directory has to be selected from the list , then all files are
* ignored . */
return false ; } else { /* Check whether name of the file ends with the extension . Added if it
* does . */
String name = file . getName ( ) . toLowerCase ( Locale . getDefault ( ) ) ; for ( String ext : validExtensions ) { if ( name . endsWith ( ext ) ) { return true ; } } } return false ; |
public class AnnotationTypeOptionalMemberBuilder { /** * Build the default value for this optional member .
* @ param node the XML element that specifies which components to document
* @ param annotationDocTree the content tree to which the documentation will be added */
public void buildDefaultValueInfo ( XMLNode node , Content annotationDocTree ) { } } | ( ( AnnotationTypeOptionalMemberWriter ) writer ) . addDefaultValueInfo ( ( MemberDoc ) members . get ( currentMemberIndex ) , annotationDocTree ) ; |
public class TimelineModel { /** * Deletes all given events in the model with UI update .
* @ param events collection of events to be deleted
* @ param timelineUpdater TimelineUpdater instance to delete the events in UI */
public void deleteAll ( Collection < TimelineEvent > events , TimelineUpdater timelineUpdater ) { } } | if ( events != null && ! events . isEmpty ( ) ) { for ( TimelineEvent event : events ) { delete ( event , timelineUpdater ) ; } } |
public class RestController { /** * Does a rsql / fiql query , returns the result as csv
* < p > Parameters :
* < p > q : the query
* < p > attributes : the attributes to return , if not specified returns all attributes
* < p > start : the index of the first row , default 0
* < p > num : the number of results to return , default 100 , max 10000
* < p > Example : / api / v1 / csv / person ? q = firstName = = Piet & attributes = firstName , lastName & start = 10 & num = 100 */
@ GetMapping ( value = "/csv/{entityTypeId}" , produces = "text/csv" ) @ ResponseBody public EntityCollection retrieveEntityCollection ( @ PathVariable ( "entityTypeId" ) String entityTypeId , @ RequestParam ( value = "attributes" , required = false ) String [ ] attributes , HttpServletRequest req , HttpServletResponse resp ) throws IOException { } } | final Set < String > attributesSet = toAttributeSet ( attributes ) ; EntityType meta ; Iterable < Entity > entities ; try { meta = dataService . getEntityType ( entityTypeId ) ; Query < Entity > q = new QueryStringParser ( meta , molgenisRSQL ) . parseQueryString ( req . getParameterMap ( ) ) ; String [ ] sortAttributeArray = req . getParameterMap ( ) . get ( "sortColumn" ) ; if ( sortAttributeArray != null && sortAttributeArray . length == 1 && StringUtils . isNotEmpty ( sortAttributeArray [ 0 ] ) ) { String sortAttribute = sortAttributeArray [ 0 ] ; String sortOrderArray [ ] = req . getParameterMap ( ) . get ( "sortOrder" ) ; Sort . Direction order = Sort . Direction . ASC ; if ( sortOrderArray != null && sortOrderArray . length == 1 && StringUtils . isNotEmpty ( sortOrderArray [ 0 ] ) ) { String sortOrder = sortOrderArray [ 0 ] ; switch ( sortOrder ) { case "ASC" : order = Sort . Direction . ASC ; break ; case "DESC" : order = Sort . Direction . DESC ; break ; default : throw new RuntimeException ( "unknown sort order" ) ; } } q . sort ( ) . on ( sortAttribute , order ) ; } if ( q . getPageSize ( ) == 0 ) { q . pageSize ( EntityCollectionRequest . DEFAULT_ROW_COUNT ) ; } if ( q . getPageSize ( ) > EntityCollectionRequest . MAX_ROWS ) { resp . sendError ( HttpServletResponse . SC_BAD_REQUEST , "Num exceeded the maximum of " + EntityCollectionRequest . MAX_ROWS + " rows" ) ; return null ; } entities = ( ) -> dataService . findAll ( entityTypeId , q ) . 
iterator ( ) ; } catch ( ConversionFailedException | RSQLParserException | UnknownAttributeException | IllegalArgumentException | UnsupportedOperationException | UnknownEntityException e ) { resp . sendError ( HttpServletResponse . SC_BAD_REQUEST , e . getMessage ( ) ) ; return null ; } catch ( MolgenisDataAccessException e ) { resp . sendError ( HttpServletResponse . SC_UNAUTHORIZED ) ; return null ; } // Check attribute names
Iterable < String > attributesIterable = Iterables . transform ( meta . getAtomicAttributes ( ) , attribute -> attribute . getName ( ) . toLowerCase ( ) ) ; if ( attributesSet != null ) { SetView < String > diff = Sets . difference ( attributesSet , Sets . newHashSet ( attributesIterable ) ) ; if ( ! diff . isEmpty ( ) ) { resp . sendError ( HttpServletResponse . SC_BAD_REQUEST , "Unknown attributes " + diff ) ; return null ; } } attributesIterable = Iterables . transform ( meta . getAtomicAttributes ( ) , Attribute :: getName ) ; if ( attributesSet != null ) { attributesIterable = Iterables . filter ( attributesIterable , attribute -> attributesSet . contains ( attribute . toLowerCase ( ) ) ) ; } return new DefaultEntityCollection ( entities , attributesIterable ) ; |
public class CasePreservingProteinSequenceCreator { /** * Takes a { @ link ProteinSequence } which was created by a
* { @ link CasePreservingProteinSequenceCreator } . Uses the case info
* stored in the user collection to modify the output array .
* < p > Sets elements of the output array which correspond to lowercase letters
* to null .
* @ param seq Input sequence with case stored as the user collection
* @ param out */
public static void setLowercaseToNull ( ProteinSequence seq , Object [ ] out ) { } } | // should have been set by seq creator
Collection < Object > userCollection = seq . getUserCollection ( ) ; if ( userCollection == null ) throw new IllegalArgumentException ( "Sequence doesn't contain valid case info" ) ; if ( userCollection . size ( ) != out . length ) throw new IllegalArgumentException ( "Sequence length doesn't math output array length" ) ; int pos = 0 ; for ( Object isAligned : userCollection ) { assert ( isAligned instanceof Boolean ) ; if ( ! ( Boolean ) isAligned ) { out [ pos ] = null ; } pos ++ ; } |
public class AtomFeedXmlReader { /** * Parses the atom data without creating the event itself from data & amp ;
* meta data .
* @ param in
* Input stream to read .
* @ return Entry . */
public final AtomEntry < Node > readAtomEntry ( final InputStream in ) { } } | final Document doc = parseDocument ( createDocumentBuilder ( ) , in ) ; final XPath xPath = createXPath ( "atom" , "http://www.w3.org/2005/Atom" ) ; final String eventStreamId = findContentText ( doc , xPath , "/atom:entry/atom:content/eventStreamId" ) ; final Integer eventNumber = findContentInteger ( doc , xPath , "/atom:entry/atom:content/eventNumber" ) ; final String eventType = findContentText ( doc , xPath , "/atom:entry/atom:content/eventType" ) ; final String eventId = findContentText ( doc , xPath , "/atom:entry/atom:content/eventId" ) ; final Node escMetaNode = findNode ( doc , xPath , "/atom:entry/atom:content/metadata/esc-meta" ) ; final String dataContextTypeStr = findContentText ( escMetaNode , xPath , "data-content-type" ) ; final EnhancedMimeType dataContentType = EnhancedMimeType . create ( dataContextTypeStr ) ; final Node data = findNode ( doc , xPath , "/atom:entry/atom:content/data" ) ; final EnhancedMimeType metaContentType ; final String metaTypeStr ; final Node meta ; if ( hasMetaData ( escMetaNode ) ) { final String metaContentTypeStr = findContentText ( escMetaNode , xPath , "meta-content-type" ) ; metaContentType = EnhancedMimeType . create ( metaContentTypeStr ) ; metaTypeStr = findContentText ( escMetaNode , xPath , "meta-type" ) ; meta = escMetaNode ; } else { metaContentType = null ; metaTypeStr = null ; meta = null ; } return new AtomEntry < Node > ( eventStreamId , eventNumber , eventType , eventId , dataContentType , metaContentType , metaTypeStr , data , meta ) ; |
public class Sneaky { /** * returns a value from a lambda ( Supplier ) that can potentially throw an exception .
* @ param supplier Supplier that can throw an exception
* @ param < T > type of supplier ' s return value
* @ return a Supplier as defined in java . util . function */
public static < T , E extends Exception > T sneak ( SneakySupplier < T , E > supplier ) { } } | return sneaked ( supplier ) . get ( ) ; |
public class ByteArray { /** * Converts the given String to a byte array .
* @ param unicodeString The String to be converted to a byte array .
* @ return A byte array representing the String . */
public static byte [ ] fromString ( String unicodeString ) { } } | byte [ ] result = null ; if ( unicodeString != null ) { result = unicodeString . getBytes ( StandardCharsets . UTF_8 ) ; } return result ; |
public class CPDAvailabilityEstimateLocalServiceBaseImpl { /** * Updates the cpd availability estimate in the database or adds it if it does not yet exist . Also notifies the appropriate model listeners .
* @ param cpdAvailabilityEstimate the cpd availability estimate
* @ return the cpd availability estimate that was updated */
@ Indexable ( type = IndexableType . REINDEX ) @ Override public CPDAvailabilityEstimate updateCPDAvailabilityEstimate ( CPDAvailabilityEstimate cpdAvailabilityEstimate ) { } } | return cpdAvailabilityEstimatePersistence . update ( cpdAvailabilityEstimate ) ; |
public class HtmlInputTextarea { /** * < p > Return the value of the < code > rows < / code > property . < / p >
* < p > Contents : The number of rows to be displayed . */
public int getRows ( ) { } } | return ( java . lang . Integer ) getStateHelper ( ) . eval ( PropertyKeys . rows , Integer . MIN_VALUE ) ; |
public class WebAPI { /** * Imports a model from a JSON file . */
public static void importModel ( ) throws Exception { } } | // Upload file to H2O
HttpClient client = new HttpClient ( ) ; PostMethod post = new PostMethod ( URL + "/Upload.json?key=" + JSON_FILE . getName ( ) ) ; Part [ ] parts = { new FilePart ( JSON_FILE . getName ( ) , JSON_FILE ) } ; post . setRequestEntity ( new MultipartRequestEntity ( parts , post . getParams ( ) ) ) ; if ( 200 != client . executeMethod ( post ) ) throw new RuntimeException ( "Request failed: " + post . getStatusLine ( ) ) ; post . releaseConnection ( ) ; // Parse the key into a model
GetMethod get = new GetMethod ( URL + "/2/ImportModel.json?" + "destination_key=MyImportedNeuralNet&" + "type=NeuralNetModel&" + "json=" + JSON_FILE . getName ( ) ) ; if ( 200 != client . executeMethod ( get ) ) throw new RuntimeException ( "Request failed: " + get . getStatusLine ( ) ) ; get . releaseConnection ( ) ; |
public class SoftwareModuleAddUpdateWindow { /** * fill the data of a softwareModule in the content of the window */
private void populateValuesOfSwModule ( ) { } } | if ( baseSwModuleId == null ) { return ; } editSwModule = Boolean . TRUE ; softwareModuleManagement . get ( baseSwModuleId ) . ifPresent ( swModule -> { nameTextField . setValue ( swModule . getName ( ) ) ; versionTextField . setValue ( swModule . getVersion ( ) ) ; vendorTextField . setValue ( swModule . getVendor ( ) ) ; descTextArea . setValue ( swModule . getDescription ( ) ) ; softwareModuleType = new LabelBuilder ( ) . name ( swModule . getType ( ) . getName ( ) ) . caption ( i18n . getMessage ( UIMessageIdProvider . CAPTION_ARTIFACT_SOFTWARE_MODULE_TYPE ) ) . buildLabel ( ) ; } ) ; |
public class InvalidityDateExtension { /** * Set the attribute value . */
public void set ( String name , Object obj ) throws IOException { } } | if ( ! ( obj instanceof Date ) ) { throw new IOException ( "Attribute must be of type Date." ) ; } if ( name . equalsIgnoreCase ( DATE ) ) { date = ( Date ) obj ; } else { throw new IOException ( "Name not supported by InvalidityDateExtension" ) ; } encodeThis ( ) ; |
public class SSLComponent { /** * DS method to deactivate this component .
* @ param context */
@ Deactivate protected synchronized void deactivate ( int reason ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Deactivated: " + reason ) ; } super . deactivate ( MY_ALIAS , reason ) ; repertoireMap . clear ( ) ; repertoirePIDMap . clear ( ) ; keystoreIdMap . clear ( ) ; keystorePidMap . clear ( ) ; SSLConfigManager . getInstance ( ) . clearSSLConfigMap ( ) ; KeyStoreManager . getInstance ( ) . clearKSMap ( ) ; AbstractJSSEProvider . clearSSLContextCache ( ) ; processConfig ( true ) ; this . componentContext = null ; |
public class JcrQueryParser { /** * { @ inheritDoc } */
protected Query getPrefixQuery ( String field , String termStr ) throws ParseException { } } | return getWildcardQuery ( field , termStr + "*" ) ; |
public class ManagedAuditLoggerImpl { /** * Immediate updates - TODO find some better way to do these if we end up adding more handler types ! */
@ Override public void updateHandlerMaxFailureCount ( String name , int count ) { } } | config . lock ( ) ; try { AuditLogHandler handler = config . getConfiguredHandler ( name ) ; handler . setMaxFailureCount ( count ) ; } finally { config . unlock ( ) ; } |
public class SynchronizedCache { /** * 根据预设的缓存清理策略进行缓存清理 */
private synchronized void clean ( ) { } } | while ( checkOverFlow ( ) ) { List < CacheObject > cacheObjectList = sort ( ) ; if ( cacheObjectList != null && cacheObjectList . size ( ) > 0 ) { CacheObject cacheObject = ( CacheObject ) cacheObjectList . get ( 0 ) ; remove ( cacheObject . getKey ( ) ) ; } } |
public class ReflectUtil { /** * 获取指定对象的所有属性 , 包含父类属性
* < p > Function : getFieldArrayExcludeUID < / p >
* < p > Description : 不抓取serialVersionUID属性 < / p >
* @ param clazz
* @ return
* @ author acexy @ thankjava . com
* @ date 2014-12-16 上午11:27:44
* @ version 1.0 */
public static Field [ ] getFieldArrayIncludeSupClassExcludeUID ( Class < ? > clazz ) { } } | Field [ ] currField = clazz . getDeclaredFields ( ) ; clazz = clazz . getSuperclass ( ) ; Field [ ] supField = clazz . getDeclaredFields ( ) ; Field [ ] temp = new Field [ currField . length + supField . length ] ; int length = 0 ; for ( Field curr : currField ) { if ( "serialVersionUID" . equals ( curr . getName ( ) ) ) { continue ; } temp [ length ] = curr ; length ++ ; } for ( Field sup : supField ) { if ( "serialVersionUID" . equals ( sup . getName ( ) ) ) { continue ; } temp [ length ] = sup ; length ++ ; } Field [ ] all = new Field [ length ] ; for ( int i = 0 ; i < all . length ; i ++ ) { all [ i ] = temp [ i ] ; } return all ; |
public class ServletMapPrinterFactory { /** * The setter for setting configuration file . It will convert the value to a URI .
* @ param configurationFiles the configuration file map . */
public final void setConfigurationFiles ( final Map < String , String > configurationFiles ) throws URISyntaxException { } } | this . configurationFiles . clear ( ) ; this . configurationFileLastModifiedTimes . clear ( ) ; for ( Map . Entry < String , String > entry : configurationFiles . entrySet ( ) ) { if ( ! entry . getValue ( ) . contains ( ":/" ) ) { // assume is a file
this . configurationFiles . put ( entry . getKey ( ) , new File ( entry . getValue ( ) ) . toURI ( ) ) ; } else { this . configurationFiles . put ( entry . getKey ( ) , new URI ( entry . getValue ( ) ) ) ; } } if ( this . configFileLoader != null ) { this . validateConfigurationFiles ( ) ; } |
public class StartConfigurationRecorderRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( StartConfigurationRecorderRequest startConfigurationRecorderRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( startConfigurationRecorderRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( startConfigurationRecorderRequest . getConfigurationRecorderName ( ) , CONFIGURATIONRECORDERNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DirectoryConnection { /** * Set the DirectoryConnection ConnectionStatus .
* @ param status
* the ConnectionStatus . */
private void setStatus ( ConnectionStatus status ) { } } | if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Set status to - " + status ) ; } if ( ! this . status . equals ( status ) ) { ConnectionStatus pre = this . status ; this . status = status ; eventThread . queueClientEvent ( new ClientStatusEvent ( pre , status ) ) ; } |
public class JSMessageData { /** * Locking : Needs to lock as it relies on the cache & contents remaining in their current state . */
@ Override public int getModelID ( int accessor ) throws JMFUninitializedAccessException , JMFSchemaViolationException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . entry ( this , tc , "getModelID" , new Object [ ] { Integer . valueOf ( accessor ) } ) ; int ans ; checkIndex ( accessor ) ; synchronized ( getMessageLockArtefact ( ) ) { checkDynamic ( accessor ) ; if ( cache [ accessor ] != null ) { ans = ( ( JMFPart ) cache [ accessor ] ) . getModelID ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "getModelID" , Integer . valueOf ( ans ) ) ; return ans ; } if ( contents == null ) { JMFUninitializedAccessException e = new JMFUninitializedAccessException ( "Dynamic value at accessor " + accessor + " is missing" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "getModelID" , e ) ; throw e ; } // Get offset of the dynamic field
int realOffset = getAbsoluteOffset ( accessor ) ; // Read model Id skipping over length
ans = ArrayUtil . readInt ( contents , realOffset + 4 ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "getModelID" , Integer . valueOf ( ans ) ) ; return ans ; |
public class ModuleBuildReader { /** * Adds the specified future to the list of extra builds .
* @ param future The future to add */
public void addExtraBuild ( ModuleBuildFuture future ) { } } | if ( extraBuilds == null ) { extraBuilds = new LinkedList < ModuleBuildFuture > ( ) ; } extraBuilds . add ( future ) ; |
public class DatabaseDAODefaultImpl { public List < DbHistory > getClassPipePropertyHistory ( Database database , String className , String pipeName , String propertyName ) throws DevFailed { } } | String [ ] array = new String [ ] { className , pipeName , propertyName } ; DeviceData argIn = new DeviceData ( ) ; argIn . insert ( array ) ; DeviceData argOut = database . command_inout ( "DbGetClassPipePropertyHist" , argIn ) ; return convertPropertyHistory ( argOut . extractStringArray ( ) , true ) ; |
public class MapFile { /** * Reads POI data for an area defined by the tile in the upper left and the tile in
* the lower right corner .
* This implementation takes the data storage of a MapFile into account for greater efficiency .
* @ param upperLeft tile that defines the upper left corner of the requested area .
* @ param lowerRight tile that defines the lower right corner of the requested area .
* @ return map data for the tile . */
@ Override public MapReadResult readPoiData ( Tile upperLeft , Tile lowerRight ) { } } | return readMapData ( upperLeft , lowerRight , Selector . POIS ) ; |
public class TextProtoNetworkExternalizer { /** * Writes a text table - based proto network and produces a descriptor
* including all of the table files .
* @ param pn
* { @ link ProtoNetwork } , the proto network , which cannot be null
* @ param protoNetworkRootPath
* { @ link String } , the root path where proto network files should
* be created
* @ return { @ link ProtoNetworkDescriptor } , the proto network descriptor
* @ throws ProtoNetworkError Thrown if there was an error writing the { @ link ProtoNetwork }
* @ throws InvalidArgument Thrown if { @ code protoNetwork } is null */
@ Override public ProtoNetworkDescriptor writeProtoNetwork ( ProtoNetwork pn , String protoNetworkRootPath ) throws ProtoNetworkError { } } | if ( pn == null ) throw new InvalidArgument ( "protoNetwork is null" ) ; CSVWriter allWriter ; String path = asPath ( protoNetworkRootPath , "all.tbl" ) ; try { allWriter = new CSVWriter ( new FileWriter ( path ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; } catch ( IOException e ) { final String msg = "Cannot open all tbl debug file for write" ; throw new ProtoNetworkError ( path , msg , e ) ; } CSVWriter tdvWriter ; allWriter . writeNext ( new String [ ] { "Document table" } ) ; // write document table
DocumentTable dt = pn . getDocumentTable ( ) ; File dfile = new File ( protoNetworkRootPath + File . separator + "document.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( dfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "DocumentIndex" , "Name" , "Description" , "Version" , "Copyright" , "Disclaimer" , "ContactInfo" , "Authors" , "Licenses" ) ; List < DocumentHeader > dhs = dt . getDocumentHeaders ( ) ; for ( int i = 0 ; i < dhs . size ( ) ; i ++ ) { DocumentHeader dh = dhs . get ( i ) ; tdvWriter . writeNext ( new String [ ] { String . valueOf ( i ) , dh . getName ( ) == null ? "-" : dh . getName ( ) , dh . getDescription ( ) == null ? "-" : dh . getDescription ( ) , dh . getVersion ( ) == null ? "-" : dh . getVersion ( ) , dh . getCopyright ( ) == null ? "-" : dh . getCopyright ( ) , dh . getDisclaimer ( ) == null ? "-" : dh . getDisclaimer ( ) , dh . getContactInfo ( ) == null ? "-" : dh . getContactInfo ( ) , dh . getAuthors ( ) == null ? "-" : dh . getAuthors ( ) , dh . getLicenses ( ) == null ? "-" : dh . getLicenses ( ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( i ) , dh . getName ( ) == null ? "-" : dh . getName ( ) , dh . getDescription ( ) == null ? "-" : dh . getDescription ( ) , dh . getVersion ( ) == null ? "-" : dh . getVersion ( ) , dh . getCopyright ( ) == null ? "-" : dh . getCopyright ( ) , dh . getDisclaimer ( ) == null ? "-" : dh . getDisclaimer ( ) , dh . getContactInfo ( ) == null ? "-" : dh . getContactInfo ( ) , dh . getAuthors ( ) == null ? "-" : dh . getAuthors ( ) , dh . getLicenses ( ) == null ? "-" : dh . getLicenses ( ) } ) ; } tdvWriter . close ( ) ; } catch ( IOException e ) { final String msg = "Cannot create document file for proto-network" ; throw new ProtoNetworkError ( dfile . getAbsolutePath ( ) , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Namespace table" } ) ; // Write namespace table
NamespaceTable nt = pn . getNamespaceTable ( ) ; File nsfile = new File ( protoNetworkRootPath + File . separator + "namespace.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( nsfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "NamespaceIndex" , "Prefix" , "ResourceLocation" ) ; for ( TableNamespace ns : nt . getNamespaces ( ) ) { String nsp = StringUtils . isBlank ( ns . getPrefix ( ) ) ? "-" : ns . getPrefix ( ) ; tdvWriter . writeNext ( new String [ ] { String . valueOf ( nt . getNamespaceIndex ( ) . get ( ns ) ) , String . valueOf ( nsp ) , String . valueOf ( ns . getResourceLocation ( ) ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( nt . getNamespaceIndex ( ) . get ( ns ) ) , String . valueOf ( nsp ) , String . valueOf ( ns . getResourceLocation ( ) ) } ) ; } tdvWriter . close ( ) ; } catch ( IOException e ) { final String msg = "Cannot create namespace file for proto-network" ; final String name = nsfile . getAbsolutePath ( ) ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Document Namespace table" } ) ; // Write document - namespace table
path = asPath ( protoNetworkRootPath , "document-namespace.tbl" ) ; File dnsfile = new File ( path ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( dnsfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "DocumentIndex" , "NamespaceIndex" ) ; Map < Integer , List < Integer > > dnsi = nt . getDocumentNamespaces ( ) ; for ( final Entry < Integer , List < Integer > > e : dnsi . entrySet ( ) ) { Integer di = e . getKey ( ) ; List < Integer > dnsl = e . getValue ( ) ; if ( noItems ( dnsl ) ) continue ; for ( Integer nsi : dnsl ) { tdvWriter . writeNext ( new String [ ] { String . valueOf ( di ) , String . valueOf ( nsi ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( di ) , String . valueOf ( nsi ) } ) ; } } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = path ; final String msg = "Cannot create document namespace file for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Parameter table" } ) ; // write parameter table
ParameterTable parameterTable = pn . getParameterTable ( ) ; NamespaceTable nsTable = pn . getNamespaceTable ( ) ; File pfile = new File ( protoNetworkRootPath + File . separator + "parameter.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( pfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "ParameterIndex" , "NamespaceIndex" , "Value" , "(Global ParameterIndex)" ) ; for ( final TableParameter parameter : parameterTable . getTableParameters ( ) ) { // fetch the namespace value
TableNamespace tn = parameter . getNamespace ( ) ; Integer nsIndex = nsTable . getNamespaceIndex ( ) . get ( tn ) ; String nsValue = "-" ; if ( nsIndex != null ) { nsValue = String . valueOf ( nsIndex ) ; } if ( parameterTable . getGlobalIndex ( ) . isEmpty ( ) ) { tdvWriter . writeNext ( new String [ ] { String . valueOf ( parameterTable . getTableParameterIndex ( ) . get ( parameter ) ) , nsValue , parameter . getValue ( ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( parameterTable . getTableParameterIndex ( ) . get ( parameter ) ) , nsValue , parameter . getValue ( ) } ) ; } else { Integer globalIndex = parameterTable . getGlobalIndex ( ) . get ( parameterTable . getTableParameterIndex ( ) . get ( parameter ) ) ; tdvWriter . writeNext ( new String [ ] { String . valueOf ( parameterTable . getTableParameterIndex ( ) . get ( parameter ) ) , nsValue , parameter . getValue ( ) , globalIndex . toString ( ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( parameterTable . getTableParameterIndex ( ) . get ( parameter ) ) , nsValue , parameter . getValue ( ) , globalIndex . toString ( ) } ) ; } } tdvWriter . close ( ) ; } catch ( IOException e ) { final String msg = "Cannot create parameter file for proto-network" ; final String name = pfile . getAbsolutePath ( ) ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Term table" } ) ; // write term table
TermTable termTable = pn . getTermTable ( ) ; File tfile = new File ( protoNetworkRootPath + File . separator + "term.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( tfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "TermIndex" , "Value" , "(Global TermIndex)" ) ; List < String > termValues = termTable . getTermValues ( ) ; Map < Integer , Integer > tgi = termTable . getGlobalTermIndex ( ) ; for ( int i = 0 ; i < termValues . size ( ) ; i ++ ) { if ( tgi . isEmpty ( ) ) { tdvWriter . writeNext ( new String [ ] { String . valueOf ( i ) , termValues . get ( i ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( i ) , termValues . get ( i ) } ) ; } else { Integer globalIndex = tgi . get ( i ) ; tdvWriter . writeNext ( new String [ ] { String . valueOf ( i ) , termValues . get ( i ) , globalIndex . toString ( ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( i ) , termValues . get ( i ) , globalIndex . toString ( ) } ) ; } } tdvWriter . close ( ) ; } catch ( IOException e ) { final String msg = "Cannot create term file for proto-network" ; final String name = tfile . getAbsolutePath ( ) ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Term Parameter table" } ) ; // write term - parameter table
TermParameterMapTable termParameterMapTable = pn . getTermParameterMapTable ( ) ; File tpmtfile = new File ( protoNetworkRootPath + File . separator + "term-parameter-map.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( tpmtfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "TermIndex" , "ParameterIndex" , "Ordinal" ) ; for ( final Integer termIndex : termParameterMapTable . getTermParameterIndex ( ) . keySet ( ) ) { List < Integer > parameterIndexes = termParameterMapTable . getTermParameterIndex ( ) . get ( termIndex ) ; int ord = 0 ; for ( Integer parameterIndex : parameterIndexes ) { tdvWriter . writeNext ( new String [ ] { String . valueOf ( termIndex ) , String . valueOf ( parameterIndex ) , String . valueOf ( ord ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( termIndex ) , String . valueOf ( parameterIndex ) , String . valueOf ( ord ) } ) ; ord ++ ; } } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = tpmtfile . getAbsolutePath ( ) ; final String msg = "Cannot create term to parameter mapping file " + "for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Statement table" } ) ; // write statement table
StatementTable statementTable = pn . getStatementTable ( ) ; File sfile = new File ( protoNetworkRootPath + File . separator + "statement.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( sfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "StatementIndex" , "SubjectTermIndex" , "Relationship" , "ObjectTermIndex" , "NestedSubjectTermIndex" , "NestedRelationship" , "NestedObjectTermIndex" ) ; List < TableStatement > statements = statementTable . getStatements ( ) ; for ( int i = 0 ; i < statements . size ( ) ; i ++ ) { TableStatement statement = statements . get ( i ) ; String si = String . valueOf ( i ) ; if ( statement . getRelationshipName ( ) == null ) { tdvWriter . writeNext ( new String [ ] { si , String . valueOf ( statement . getSubjectTermId ( ) ) , "-" , "-" , "-" , "-" , "-" } ) ; allWriter . writeNext ( new String [ ] { si , String . valueOf ( statement . getSubjectTermId ( ) ) , "-" , "-" , "-" , "-" , "-" } ) ; } else if ( statement . getObjectTermId ( ) != null ) { tdvWriter . writeNext ( new String [ ] { si , String . valueOf ( statement . getSubjectTermId ( ) ) , statement . getRelationshipName ( ) , String . valueOf ( statement . getObjectTermId ( ) ) , "-" , "-" , "-" } ) ; allWriter . writeNext ( new String [ ] { si , String . valueOf ( statement . getSubjectTermId ( ) ) , statement . getRelationshipName ( ) , String . valueOf ( statement . getObjectTermId ( ) ) , "-" , "-" , "-" } ) ; } else { tdvWriter . writeNext ( new String [ ] { si , String . valueOf ( statement . getSubjectTermId ( ) ) , statement . getRelationshipName ( ) , String . valueOf ( statement . getObjectTermId ( ) ) , String . valueOf ( statement . getNestedSubject ( ) ) , statement . getNestedRelationship ( ) , String . valueOf ( statement . getNestedObject ( ) ) } ) ; allWriter . writeNext ( new String [ ] { si , String . valueOf ( statement . getSubjectTermId ( ) ) , statement . getRelationshipName ( ) , String . 
valueOf ( statement . getObjectTermId ( ) ) , String . valueOf ( statement . getNestedSubject ( ) ) , statement . getNestedRelationship ( ) , String . valueOf ( statement . getNestedObject ( ) ) } ) ; } } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = sfile . getAbsolutePath ( ) ; final String msg = "Cannot create statement file for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Statement Document table" } ) ; // Write document - statement table
path = asPath ( protoNetworkRootPath , "document-statement.tbl" ) ; File dsfile = new File ( path ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( dsfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "DocumentIndex" , "StatementIndex" ) ; List < TableStatement > sts = statementTable . getStatements ( ) ; Map < Integer , Integer > dsm = statementTable . getStatementDocument ( ) ; for ( int si = 0 ; si < sts . size ( ) ; si ++ ) { tdvWriter . writeNext ( new String [ ] { String . valueOf ( dsm . get ( si ) ) , String . valueOf ( si ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( dsm . get ( si ) ) , String . valueOf ( si ) } ) ; } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = path ; final String msg = "Cannot create document statement file for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Annotation Definition table" } ) ; // Write annotation definition table
AnnotationDefinitionTable adt = pn . getAnnotationDefinitionTable ( ) ; File adfile = new File ( protoNetworkRootPath + File . separator + "annotation-definition.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( adfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "AnnotationDefinitionIndex" , "Name" , "Description" , "Usage" , "Domain" , "AnnotationDefinitionType" ) ; for ( final TableAnnotationDefinition ad : adt . getAnnotationDefinitions ( ) ) { tdvWriter . writeNext ( new String [ ] { String . valueOf ( adt . getDefinitionIndex ( ) . get ( ad ) ) , ad . getName ( ) , ad . getDescription ( ) == null ? "-" : ad . getDescription ( ) , ad . getUsage ( ) == null ? "-" : ad . getUsage ( ) , ad . getAnnotationDomain ( ) == null ? "-" : ad . getAnnotationDomain ( ) , String . valueOf ( ad . getAnnotationType ( ) ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( adt . getDefinitionIndex ( ) . get ( ad ) ) , ad . getName ( ) , ad . getDescription ( ) == null ? "-" : ad . getDescription ( ) , ad . getUsage ( ) == null ? "-" : ad . getUsage ( ) , ad . getAnnotationDomain ( ) == null ? "-" : ad . getAnnotationDomain ( ) , String . valueOf ( ad . getAnnotationType ( ) ) } ) ; } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = adfile . getAbsolutePath ( ) ; final String msg = "Cannot create annotation definition file for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Document Annotation Definition table" } ) ; // Write document annotation definition table
path = asPath ( protoNetworkRootPath , "document-annotation-definition.tbl" ) ; File dadfile = new File ( path ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( dadfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "DocumentIndex" , "AnnotationDefinitionIndex" ) ; Map < Integer , Set < Integer > > dnsi = adt . getDocumentAnnotationDefinitions ( ) ; for ( final Entry < Integer , Set < Integer > > e : dnsi . entrySet ( ) ) { Integer di = e . getKey ( ) ; Set < Integer > adl = e . getValue ( ) ; if ( noItems ( adl ) ) continue ; for ( Integer adi : adl ) { tdvWriter . writeNext ( new String [ ] { String . valueOf ( di ) , String . valueOf ( adi ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( di ) , String . valueOf ( adi ) } ) ; } } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = path ; final String msg = "Cannot create document annotation definition " + "file for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Annotation Value table" } ) ; // Write annotation value table
AnnotationValueTable avt = pn . getAnnotationValueTable ( ) ; File avfile = new File ( protoNetworkRootPath + File . separator + "annotation-value.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( avfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "AnnotationValueIndex" , "AnnotationDefinitionIndex" , "AnnotationValue" ) ; Map < Integer , TableAnnotationValue > annotationIndex = avt . getIndexValue ( ) ; Set < Integer > annotationIds = annotationIndex . keySet ( ) ; for ( Integer annotationId : annotationIds ) { TableAnnotationValue value = annotationIndex . get ( annotationId ) ; tdvWriter . writeNext ( new String [ ] { String . valueOf ( annotationId ) , String . valueOf ( value . getAnnotationDefinitionId ( ) ) , value . getAnnotationValue ( ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( annotationId ) , String . valueOf ( value . getAnnotationDefinitionId ( ) ) , value . getAnnotationValue ( ) } ) ; } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = avfile . getAbsolutePath ( ) ; final String msg = "Cannot create annotation value file for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Statement Annotation Map table" } ) ; // Write statement annotation map table
StatementAnnotationMapTable samt = pn . getStatementAnnotationMapTable ( ) ; File samtfile = new File ( protoNetworkRootPath + File . separator + "statement-annotation-map.tbl" ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( samtfile ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "StatementIndex" , "AnnotationDefinitionIndex" , "AnnotationValueIndex" ) ; for ( final Map . Entry < Integer , Set < AnnotationPair > > same : samt . getStatementAnnotationPairsIndex ( ) . entrySet ( ) ) { Integer sid = same . getKey ( ) ; for ( AnnotationPair ap : same . getValue ( ) ) { tdvWriter . writeNext ( new String [ ] { String . valueOf ( sid ) , String . valueOf ( ap . getAnnotationDefinitionId ( ) ) , String . valueOf ( ap . getAnnotationValueId ( ) ) } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( sid ) , String . valueOf ( ap . getAnnotationDefinitionId ( ) ) , String . valueOf ( ap . getAnnotationValueId ( ) ) } ) ; } } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = samtfile . getAbsolutePath ( ) ; final String msg = "Cannot create statement annotation map file " + "for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Proto Node Table" } ) ; // Write proto node table
final ProtoNodeTable pnt = pn . getProtoNodeTable ( ) ; final Map < Integer , Integer > eqn = pnt . getEquivalences ( ) ; File pntf = new File ( asPath ( protoNetworkRootPath , "proto-node.tbl" ) ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( pntf ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "ProtoNodeIndex" , "Label" , "EquivalenceIndex" ) ; final List < String > nodes = pnt . getProtoNodes ( ) ; int nodeId = 0 ; for ( int i = 0 , n = nodes . size ( ) ; i < n ; i ++ ) { String nodeidstr = String . valueOf ( nodeId ) ; String eqidstr = String . valueOf ( eqn . get ( i ) ) ; tdvWriter . writeNext ( new String [ ] { nodeidstr , nodes . get ( i ) , eqidstr } ) ; allWriter . writeNext ( new String [ ] { nodeidstr , nodes . get ( i ) , eqidstr } ) ; nodeId ++ ; } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = pntf . getAbsolutePath ( ) ; final String msg = "Cannot create proto node file for proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Term Proto Node Table" } ) ; // Write term proto node table
final TermTable tt = pn . getTermTable ( ) ; File tpnf = new File ( asPath ( protoNetworkRootPath , "term-proto-node.tbl" ) ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( tpnf ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "TermIndex" , "ProtoNodeIndex" ) ; final List < String > terms = tt . getTermValues ( ) ; final Map < Integer , Integer > visited = pnt . getTermNodeIndex ( ) ; for ( int i = 0 , n = terms . size ( ) ; i < n ; i ++ ) { String nodeidstr = String . valueOf ( visited . get ( i ) ) ; tdvWriter . writeNext ( new String [ ] { String . valueOf ( i ) , nodeidstr } ) ; allWriter . writeNext ( new String [ ] { String . valueOf ( i ) , nodeidstr } ) ; } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = tpnf . getAbsolutePath ( ) ; final String msg = "Cannot create term proto node file for " + "proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Proto Edge Table" } ) ; // Write proto edge table
final ProtoEdgeTable pet = pn . getProtoEdgeTable ( ) ; final Map < Integer , Integer > edgeEq = pet . getEquivalences ( ) ; File petf = new File ( asPath ( protoNetworkRootPath , "proto-edge.tbl" ) ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( petf ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "ProtoEdgeIndex" , "SourceProtoNodeIndex" , "Relationship" , "TargetProtoNodeIndex" , "EquivalenceIndex" ) ; final List < TableProtoEdge > edges = pet . getProtoEdges ( ) ; for ( int i = 0 , n = edges . size ( ) ; i < n ; i ++ ) { final TableProtoEdge edge = edges . get ( i ) ; final String ei = String . valueOf ( i ) ; final String source = String . valueOf ( edge . getSource ( ) ) ; final String rel = edge . getRel ( ) ; final String target = String . valueOf ( edge . getTarget ( ) ) ; String eqIndex = edgeEq . get ( i ) . toString ( ) ; tdvWriter . writeNext ( new String [ ] { ei , source , rel , target , eqIndex } ) ; allWriter . writeNext ( new String [ ] { ei , source , rel , target , eqIndex } ) ; } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = tpnf . getAbsolutePath ( ) ; final String msg = "Cannot create proto edge file for " + "proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } allWriter . writeNext ( new String [ 0 ] ) ; allWriter . writeNext ( new String [ ] { "Proto Edge Statement Table" } ) ; // Write proto edge statement table
final List < TableProtoEdge > edges = pet . getProtoEdges ( ) ; File spet = new File ( asPath ( protoNetworkRootPath , "proto-edge-statement.tbl" ) ) ; try { tdvWriter = new CSVWriter ( new FileWriter ( spet ) , '\t' , CSVWriter . NO_QUOTE_CHARACTER ) ; writeHeader ( tdvWriter , allWriter , "ProtoEdgeIndex" , "StatementIndex" ) ; final Map < Integer , Set < Integer > > edgeStmts = pet . getEdgeStatements ( ) ; for ( int i = 0 , n = edges . size ( ) ; i < n ; i ++ ) { final String ei = String . valueOf ( i ) ; final Set < Integer > stmts = edgeStmts . get ( i ) ; if ( hasItems ( stmts ) ) { for ( final Integer stmt : stmts ) { final String stmtstring = String . valueOf ( stmt ) ; tdvWriter . writeNext ( new String [ ] { ei , stmtstring } ) ; allWriter . writeNext ( new String [ ] { ei , stmtstring } ) ; } } } tdvWriter . close ( ) ; } catch ( IOException e ) { final String name = tpnf . getAbsolutePath ( ) ; final String msg = "Cannot create proto edge file for " + "proto-network" ; throw new ProtoNetworkError ( name , msg , e ) ; } try { allWriter . close ( ) ; } catch ( IOException e ) { final String name = samtfile . getAbsolutePath ( ) ; final String msg = "Cannot close all tbl debug file after write" ; throw new ProtoNetworkError ( name , msg , e ) ; } return new TextProtoNetworkDescriptor ( protoNetworkRootPath , dfile , nsfile , dnsfile , pfile , tfile , tpmtfile , sfile , dsfile , adfile , dadfile , avfile , samtfile , pntf , tpnf , petf , spet ) ; |
public class MemberReader { /** * Make the request to the Twilio API to perform the read .
* @ param client TwilioRestClient with which to make the request
* @ return Member ResourceSet */
@ Override public ResourceSet < Member > read ( final TwilioRestClient client ) { } } | return new ResourceSet < > ( this , client , firstPage ( client ) ) ; |
public class MessageUnpacker { /** * Read a byte value at the cursor and proceed the cursor .
* @ return
* @ throws IOException */
private byte readByte ( ) throws IOException { } } | if ( buffer . size ( ) > position ) { byte b = buffer . getByte ( position ) ; position ++ ; return b ; } else { nextBuffer ( ) ; if ( buffer . size ( ) > 0 ) { byte b = buffer . getByte ( 0 ) ; position = 1 ; return b ; } return readByte ( ) ; } |
public class AWSCognitoIdentityProviderClient { /** * Lists the user import jobs .
* @ param listUserImportJobsRequest
* Represents the request to list the user import jobs .
* @ return Result of the ListUserImportJobs operation returned by the service .
* @ throws ResourceNotFoundException
* This exception is thrown when the Amazon Cognito service cannot find the requested resource .
* @ throws InvalidParameterException
* This exception is thrown when the Amazon Cognito service encounters an invalid parameter .
* @ throws TooManyRequestsException
* This exception is thrown when the user has made too many requests for a given operation .
* @ throws NotAuthorizedException
* This exception is thrown when a user is not authorized .
* @ throws InternalErrorException
* This exception is thrown when Amazon Cognito encounters an internal error .
* @ sample AWSCognitoIdentityProvider . ListUserImportJobs
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / ListUserImportJobs " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public ListUserImportJobsResult listUserImportJobs ( ListUserImportJobsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListUserImportJobs ( request ) ; |
public class ItemsInterval { /** * Create a new item based on the existing items and new service period
* < ul >
* < li > During the build phase , we only consider ADD items . This happens when for instance an existing item was partially repaired
* and there is a need to create a new item which represents the part left - - that was not repaired .
* < li > During the merge phase , we create new items that are the missing repaired items ( CANCEL ) .
* < / ul >
* @ param startDate start date of the new item to create
* @ param endDate end date of the new item to create
* @ param mergeMode mode to consider .
* @ return new item for this service period or null */
private Item createNewItem ( @ Nullable final LocalDate startDate , @ Nullable final LocalDate endDate , @ Nullable final UUID targetInvoiceId , final boolean mergeMode ) { } } | // Find the ADD ( build phase ) or CANCEL ( merge phase ) item of this interval
final Item item = getResultingItem ( mergeMode ) ; if ( item == null || startDate == null || endDate == null || targetInvoiceId == null ) { return item ; } // Prorate ( build phase ) or repair ( merge phase ) this item , as needed
final InvoiceItem proratedInvoiceItem = item . toProratedInvoiceItem ( startDate , endDate ) ; if ( proratedInvoiceItem == null ) { return null ; } else { // Keep track of the repaired amount for this item
item . incrementCurrentRepairedAmount ( proratedInvoiceItem . getAmount ( ) . abs ( ) ) ; return new Item ( proratedInvoiceItem , targetInvoiceId , item . getAction ( ) ) ; } |
public class CPDefinitionLinkPersistenceImpl { /** * Returns all the cp definition links where CProductId = & # 63 ; and type = & # 63 ; .
* @ param CProductId the c product ID
* @ param type the type
* @ return the matching cp definition links */
@ Override public List < CPDefinitionLink > findByCP_T ( long CProductId , String type ) { } } | return findByCP_T ( CProductId , type , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ; |
public class RelationalHSCache { /** * Loads all elements of this class from the data store . Use this method only when you know
* exactly what you are doing . Otherwise , you will pull a lot of data .
* @ return all objects from the database
* @ throws PersistenceException an error occurred executing the query */
@ Override public Collection < T > list ( ) throws PersistenceException { } } | logger . debug ( "enter - list()" ) ; try { return find ( null , null , false ) ; } finally { logger . debug ( "exit - list()" ) ; } |
public class CommerceNotificationQueueEntryPersistenceImpl { /** * Creates a new commerce notification queue entry with the primary key . Does not add the commerce notification queue entry to the database .
* @ param commerceNotificationQueueEntryId the primary key for the new commerce notification queue entry
* @ return the new commerce notification queue entry */
@ Override public CommerceNotificationQueueEntry create ( long commerceNotificationQueueEntryId ) { } } | CommerceNotificationQueueEntry commerceNotificationQueueEntry = new CommerceNotificationQueueEntryImpl ( ) ; commerceNotificationQueueEntry . setNew ( true ) ; commerceNotificationQueueEntry . setPrimaryKey ( commerceNotificationQueueEntryId ) ; commerceNotificationQueueEntry . setCompanyId ( companyProvider . getCompanyId ( ) ) ; return commerceNotificationQueueEntry ; |
public class AbstractThrowEventBuilder { /** * Creates an empty message event definition with the given id
* and returns a builder for the message event definition .
* @ param id the id of the message event definition
* @ return the message event definition builder object */
public MessageEventDefinitionBuilder messageEventDefinition ( String id ) { } } | MessageEventDefinition messageEventDefinition = createEmptyMessageEventDefinition ( ) ; if ( id != null ) { messageEventDefinition . setId ( id ) ; } element . getEventDefinitions ( ) . add ( messageEventDefinition ) ; return new MessageEventDefinitionBuilder ( modelInstance , messageEventDefinition ) ; |
public class CFG { /** * ( non - Javadoc )
* @ see
* edu . umd . cs . findbugs . graph . AbstractGraph # removeEdge ( edu . umd . cs . findbugs
* . graph . AbstractEdge ) */
@ Override public void removeEdge ( Edge edge ) { } } | super . removeEdge ( edge ) ; // Keep track of removed edges .
if ( removedEdgeList == null ) { removedEdgeList = new LinkedList < > ( ) ; } removedEdgeList . add ( edge ) ; |
public class DeviceTypesApi { /** * Get Available Manifest Versions
* Get a Device Type & # 39 ; s available manifest versions
* @ param deviceTypeId deviceTypeId ( required )
* @ return ApiResponse & lt ; ManifestVersionsEnvelope & gt ;
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiResponse < ManifestVersionsEnvelope > getAvailableManifestVersionsWithHttpInfo ( String deviceTypeId ) throws ApiException { } } | com . squareup . okhttp . Call call = getAvailableManifestVersionsValidateBeforeCall ( deviceTypeId , null , null ) ; Type localVarReturnType = new TypeToken < ManifestVersionsEnvelope > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ; |
public class EndpointInfoBuilder { /** * Sets the available { @ link MediaType } s . */
public EndpointInfoBuilder availableMimeTypes ( MediaType ... availableMimeTypes ) { } } | requireNonNull ( availableMimeTypes , "availableMimeTypes" ) ; return availableMimeTypes ( ImmutableSet . copyOf ( availableMimeTypes ) ) ; |
public class DigitalRadial { /** * < editor - fold defaultstate = " collapsed " desc = " Getters and Setters " > */
@ Override public void setValue ( double value ) { } } | super . setValue ( value ) ; // Set active leds relating to the new value
calcNoOfActiveLed ( ) ; if ( isValueCoupled ( ) ) { setLcdValue ( value ) ; } repaint ( getInnerBounds ( ) ) ; |
public class UCaseProps { /** * simple case mappings - - - - - * * * */
public final int tolower ( int c ) { } } | int props = trie . get ( c ) ; if ( ! propsHasException ( props ) ) { if ( getTypeFromProps ( props ) >= UPPER ) { c += getDelta ( props ) ; } } else { int excOffset = getExceptionsOffset ( props ) ; int excWord = exceptions . charAt ( excOffset ++ ) ; if ( hasSlot ( excWord , EXC_LOWER ) ) { c = getSlotValue ( excWord , EXC_LOWER , excOffset ) ; } } return c ; |
public class AggregatedSuiteResultEvent { /** * The number of tests that have { @ link TestStatus # ERROR } and
* include the suite - level errors . */
public int getErrorCount ( ) { } } | int count = 0 ; for ( AggregatedTestResultEvent t : getTests ( ) ) { if ( t . getStatus ( ) == TestStatus . ERROR ) count ++ ; } for ( FailureMirror m : getFailures ( ) ) { if ( m . isErrorViolation ( ) ) count ++ ; } return count ; |
public class WebServiceFactory { /** * get a service instance the service must have a interface and implements
* it . */
public Object getService ( TargetMetaDef targetMetaDef , RequestWrapper request ) { } } | userTargetMetaDefFactory . createTargetMetaRequest ( targetMetaDef , request . getContextHolder ( ) ) ; return webServiceAccessor . getService ( request ) ; |
public class CreateGroupResult { /** * The tags associated with the group .
* @ param tags
* The tags associated with the group .
* @ return Returns a reference to this object so that method calls can be chained together . */
public CreateGroupResult withTags ( java . util . Map < String , String > tags ) { } } | setTags ( tags ) ; return this ; |
public class Togglz { /** * The version of Togglz or < code > null < / code > if it cannot be identified */
public static String getVersion ( ) { } } | ClassLoader classLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; if ( classLoader == null ) { classLoader = Togglz . class . getClassLoader ( ) ; } URL url = classLoader . getResource ( "META-INF/maven/org.togglz/togglz-core/pom.properties" ) ; if ( url != null ) { InputStream stream = null ; try { stream = url . openStream ( ) ; Properties props = new Properties ( ) ; props . load ( stream ) ; return Strings . trimToNull ( props . getProperty ( "version" ) ) ; } catch ( IOException e ) { // ignore
} finally { if ( stream != null ) { try { stream . close ( ) ; } catch ( IOException e ) { // ignore
} } } } return null ; |
public class BayesInstance { /** * Passes a message from node1 to node2.
* node1 projects its trgPotentials into the separator .
* node2 then absorbs those trgPotentials from the separator .
* @ param sourceClique
* @ param sep
* @ param targetClique */
public void passMessage ( JunctionTreeClique sourceClique , JunctionTreeSeparator sep , JunctionTreeClique targetClique ) { } } | double [ ] sepPots = separatorStates [ sep . getId ( ) ] . getPotentials ( ) ; double [ ] oldSepPots = Arrays . copyOf ( sepPots , sepPots . length ) ; BayesVariable [ ] sepVars = sep . getValues ( ) . toArray ( new BayesVariable [ sep . getValues ( ) . size ( ) ] ) ; if ( passMessageListener != null ) { passMessageListener . beforeProjectAndAbsorb ( sourceClique , sep , targetClique , oldSepPots ) ; } project ( sepVars , cliqueStates [ sourceClique . getId ( ) ] , separatorStates [ sep . getId ( ) ] ) ; if ( passMessageListener != null ) { passMessageListener . afterProject ( sourceClique , sep , targetClique , oldSepPots ) ; } absorb ( sepVars , cliqueStates [ targetClique . getId ( ) ] , separatorStates [ sep . getId ( ) ] , oldSepPots ) ; if ( passMessageListener != null ) { passMessageListener . afterAbsorb ( sourceClique , sep , targetClique , oldSepPots ) ; } |
public class HessianInput { /** * Reads a remote object . */
public Object readRemote ( ) throws IOException { } } | String type = readType ( ) ; String url = readString ( ) ; return resolveRemote ( type , url ) ; |
public class CopyHelper { /** * Contains a collection of supported immutable types for copying .
* Only keep the types that are worth supporting as record types .
* @ param thing : an Object being checked
* @ return true if supported immutable type , false otherwise */
private static boolean isImmutableType ( Object thing ) { } } | return ( ( thing == null ) || ( thing instanceof String ) || ( thing instanceof Integer ) || ( thing instanceof Long ) ) ; |
public class EthiopicDate { @ Override // for covariant return type
@ SuppressWarnings ( "unchecked" ) public ChronoLocalDateTime < EthiopicDate > atTime ( LocalTime localTime ) { } } | return ( ChronoLocalDateTime < EthiopicDate > ) super . atTime ( localTime ) ; |
public class ImporterStatsCollector { /** * Use this when the insert fails even before the request is queued by the InternalConnectionHandler */
public void reportFailure ( String importerName , String procName , boolean decrementPending ) { } } | StatsInfo statsInfo = getStatsInfo ( importerName , procName ) ; if ( decrementPending ) { statsInfo . m_pendingCount . decrementAndGet ( ) ; } statsInfo . m_failureCount . incrementAndGet ( ) ; |
public class SanitizedContents { /** * Converts a { @ link SafeHtmlProto } into a Soy { @ link SanitizedContent } of kind HTML . */
public static SanitizedContent fromSafeHtmlProto ( SafeHtmlProto html ) { } } | return SanitizedContent . create ( SafeHtmls . fromProto ( html ) . getSafeHtmlString ( ) , ContentKind . HTML ) ; |
public class LogIterator { /** * Moves to the previous log block in reverse order , and positions it after
* the last record in that block . */
private void moveToPrevBlock ( ) { } } | blk = new BlockId ( blk . fileName ( ) , blk . number ( ) + 1 ) ; pg . read ( blk ) ; currentRec = 0 + pointerSize ; |
public class A_CmsUploadDialog { /** * Calls the submit action if there are any files selected for upload . < p > */
private void commit ( ) { } } | m_selectionDone = true ; if ( ! m_filesToUpload . isEmpty ( ) ) { m_okButton . disable ( Messages . get ( ) . key ( Messages . GUI_UPLOAD_BUTTON_OK_DISABLE_UPLOADING_0 ) ) ; if ( m_uploadButton instanceof UIObject ) { ( ( UIObject ) m_uploadButton ) . getElement ( ) . getStyle ( ) . setDisplay ( Display . NONE ) ; } showProgress ( ) ; submit ( ) ; } |
public class EditLogFileOutputStream { /** * Write a transaction to the stream . The serialization format is :
* < ul >
* < li > the opcode ( byte ) < / li >
* < li > the transaction id ( long ) < / li >
* < li > the actual Writables for the transaction < / li >
* < / ul > */
@ Override public void writeRaw ( byte [ ] bytes , int offset , int length ) throws IOException { } } | doubleBuf . writeRaw ( bytes , offset , length ) ; |
public class ServletWebServerApplicationContext { /** * Prepare the { @ link WebApplicationContext } with the given fully loaded
* { @ link ServletContext } . This method is usually called from
* { @ link ServletContextInitializer # onStartup ( ServletContext ) } and is similar to the
* functionality usually provided by a { @ link ContextLoaderListener } .
* @ param servletContext the operational servlet context */
protected void prepareWebApplicationContext ( ServletContext servletContext ) { } } | Object rootContext = servletContext . getAttribute ( WebApplicationContext . ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE ) ; if ( rootContext != null ) { if ( rootContext == this ) { throw new IllegalStateException ( "Cannot initialize context because there is already a root application context present - " + "check whether you have multiple ServletContextInitializers!" ) ; } return ; } Log logger = LogFactory . getLog ( ContextLoader . class ) ; servletContext . log ( "Initializing Spring embedded WebApplicationContext" ) ; try { servletContext . setAttribute ( WebApplicationContext . ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE , this ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Published root WebApplicationContext as ServletContext attribute with name [" + WebApplicationContext . ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE + "]" ) ; } setServletContext ( servletContext ) ; if ( logger . isInfoEnabled ( ) ) { long elapsedTime = System . currentTimeMillis ( ) - getStartupDate ( ) ; logger . info ( "Root WebApplicationContext: initialization completed in " + elapsedTime + " ms" ) ; } } catch ( RuntimeException | Error ex ) { logger . error ( "Context initialization failed" , ex ) ; servletContext . setAttribute ( WebApplicationContext . ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE , ex ) ; throw ex ; } |
public class JvmTypesBuilder { /** * Attaches the given headText to the given { @ link JvmDeclaredType } . */
public void setFileHeader ( /* @ Nullable */
JvmDeclaredType jvmDeclaredType , /* @ Nullable */
String headerText ) { } } | if ( jvmDeclaredType == null || headerText == null ) { return ; } FileHeaderAdapter fileHeaderAdapter = ( FileHeaderAdapter ) EcoreUtil . getAdapter ( jvmDeclaredType . eAdapters ( ) , FileHeaderAdapter . class ) ; if ( fileHeaderAdapter == null ) { fileHeaderAdapter = new FileHeaderAdapter ( ) ; jvmDeclaredType . eAdapters ( ) . add ( fileHeaderAdapter ) ; } fileHeaderAdapter . setHeaderText ( headerText ) ; |
public class JsonUtil { /** * Returns a field in a Json object as an object .
* Throws IllegalArgumentException if the field value is null .
* @ param object the Json object
* @ param field the field in the Json object to return
* @ return the Json field value as a Json object */
public static JsonObject getObject ( JsonObject object , String field ) { } } | final JsonValue value = object . get ( field ) ; throwExceptionIfNull ( value , field ) ; return value . asObject ( ) ; |
public class PacketWriter { /** * Signs the provided packet , so a CollectD server can verify that its authenticity .
* Wire format :
* < pre >
* ! Type ( 0x0200 ) ! Length !
* ! Signature ( SHA2 ( username + packet ) ) \
* ! Username ! Packet \
* < / pre >
* @ see < a href = " https : / / collectd . org / wiki / index . php / Binary _ protocol # Signature _ part " >
* Binary protocol - CollectD | Signature part < / a > */
private ByteBuffer signPacket ( ByteBuffer packet ) { } } | final byte [ ] signature = sign ( password , ( ByteBuffer ) ByteBuffer . allocate ( packet . remaining ( ) + username . length ) . put ( username ) . put ( packet ) . flip ( ) ) ; return ( ByteBuffer ) ByteBuffer . allocate ( BUFFER_SIZE ) . putShort ( ( short ) TYPE_SIGN_SHA256 ) . putShort ( ( short ) ( username . length + SIGNATURE_LEN ) ) . put ( signature ) . put ( username ) . put ( ( ByteBuffer ) packet . flip ( ) ) . flip ( ) ; |
public class HashCodeBuilder { /** * Uses reflection to build a valid hash code from the fields of { @ code object } .
* This constructor uses two hard coded choices for the constants needed to build a hash code .
* It uses < code > AccessibleObject . setAccessible < / code > to gain access to private fields . This means that it will
* throw a security exception if run under a security manager , if the permissions are not set up correctly . It is
* also not as efficient as testing explicitly .
* Transient members will be not be used , as they are likely derived fields , and not part of the value of the
* < code > Object < / code > .
* Static fields will not be tested . Superclass fields will be included . If no fields are found to include
* in the hash code , the result of this method will be constant .
* @ param object
* the Object to create a < code > hashCode < / code > for
* @ param excludeFields
* Collection of String field names to exclude from use in calculation of hash code
* @ return int hash code
* @ throws IllegalArgumentException
* if the object is < code > null < / code >
* @ see HashCodeExclude */
@ GwtIncompatible ( "incompatible method" ) public static int reflectionHashCode ( final Object object , final Collection < String > excludeFields ) { } } | return reflectionHashCode ( object , ReflectionToStringBuilder . toNoNullStringArray ( excludeFields ) ) ; |
public class DescribeInstanceHealthResult { /** * Information about the health of the instances .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setInstanceStates ( java . util . Collection ) } or { @ link # withInstanceStates ( java . util . Collection ) } if you want
* to override the existing values .
* @ param instanceStates
* Information about the health of the instances .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeInstanceHealthResult withInstanceStates ( InstanceState ... instanceStates ) { } } | if ( this . instanceStates == null ) { setInstanceStates ( new com . amazonaws . internal . SdkInternalList < InstanceState > ( instanceStates . length ) ) ; } for ( InstanceState ele : instanceStates ) { this . instanceStates . add ( ele ) ; } return this ; |
public class Grid { /** * Adds the i and j atoms and fills the grid , passing their bounds ( array of size 6 with x , y , z minima and x , y , z maxima )
* This way the bounds don ' t need to be recomputed .
* Subsequent call to { @ link # getIndicesContacts ( ) } or { @ link # getAtomContacts ( ) } will produce the interatomic contacts .
* @ param iAtoms
* @ param icoordbounds
* @ param jAtoms
* @ param jcoordbounds */
public void addAtoms ( Atom [ ] iAtoms , BoundingBox icoordbounds , Atom [ ] jAtoms , BoundingBox jcoordbounds ) { } } | this . iAtoms = Calc . atomsToPoints ( iAtoms ) ; this . iAtomObjects = iAtoms ; if ( icoordbounds != null ) { this . ibounds = icoordbounds ; } else { this . ibounds = new BoundingBox ( this . iAtoms ) ; } this . jAtoms = Calc . atomsToPoints ( jAtoms ) ; this . jAtomObjects = jAtoms ; if ( jAtoms == iAtoms ) { this . jbounds = ibounds ; } else { if ( jcoordbounds != null ) { this . jbounds = jcoordbounds ; } else { this . jbounds = new BoundingBox ( this . jAtoms ) ; } } fillGrid ( ) ; |
public class JCalReader { /** * Reads the next iCalendar object from the JSON data stream .
* @ return the iCalendar object or null if there are no more
* @ throws JCalParseException if the jCal syntax is incorrect ( the JSON
* syntax may be valid , but it is not in the correct jCal format ) .
* @ throws JsonParseException if the JSON syntax is incorrect
* @ throws IOException if there is a problem reading from the data stream */
@ Override public ICalendar _readNext ( ) throws IOException { } } | if ( reader . eof ( ) ) { return null ; } context . setVersion ( ICalVersion . V2_0 ) ; JCalDataStreamListenerImpl listener = new JCalDataStreamListenerImpl ( ) ; reader . readNext ( listener ) ; return listener . getICalendar ( ) ; |
public class StackHelper { /** * Create a new stack from the given collection .
* @ param < ELEMENTTYPE >
* The type of elements contained in the stack .
* @ param aValues
* The values that are to be pushed on the stack . The last element will
* be the top element on the stack . May not be < code > null < / code > .
* @ return A non - < code > null < / code > stack object . */
@ Nonnull @ ReturnsMutableCopy public static < ELEMENTTYPE > NonBlockingStack < ELEMENTTYPE > newStack ( @ Nullable final Collection < ? extends ELEMENTTYPE > aValues ) { } } | return new NonBlockingStack < > ( aValues ) ; |
public class EthereumUtil { /** * Decodes an RLPElement from the given ByteBuffer
* @ param bb Bytebuffer containing an RLPElement
* @ return RLPElement in case the byte stream represents a valid RLPElement , null if not */
private static RLPElement decodeRLPElement ( ByteBuffer bb ) { } } | RLPElement result = null ; byte firstByte = bb . get ( ) ; int firstByteUnsigned = firstByte & 0xFF ; if ( firstByteUnsigned <= 0x7F ) { result = new RLPElement ( new byte [ ] { firstByte } , new byte [ ] { firstByte } ) ; } else if ( ( firstByteUnsigned >= 0x80 ) && ( firstByteUnsigned <= 0xb7 ) ) { // read indicator
byte [ ] indicator = new byte [ ] { firstByte } ; int noOfBytes = firstByteUnsigned - 0x80 ; // read raw data
byte [ ] rawData = new byte [ noOfBytes ] ; if ( noOfBytes > 0 ) { bb . get ( rawData ) ; } result = new RLPElement ( indicator , rawData ) ; } else if ( ( firstByteUnsigned >= 0xb8 ) && ( firstByteUnsigned <= 0xbf ) ) { // read size of indicator ( size of the size )
int NoOfBytesSize = firstByteUnsigned - 0xb7 ; byte [ ] indicator = new byte [ NoOfBytesSize + 1 ] ; indicator [ 0 ] = firstByte ; bb . get ( indicator , 1 , NoOfBytesSize ) ; long noOfBytes = convertIndicatorToRLPSize ( indicator ) ; // read the data
byte [ ] rawData = new byte [ ( int ) noOfBytes ] ; bb . get ( rawData ) ; result = new RLPElement ( indicator , rawData ) ; } else { result = null ; } return result ; |
public class IntInterval { /** * Returns an IntInterval representing the odd values from the value from to the value to . */
public static IntInterval oddsFromTo ( int from , int to ) { } } | if ( from % 2 == 0 ) { if ( from < to ) { from ++ ; } else { from -- ; } } if ( to % 2 == 0 ) { if ( to > from ) { to -- ; } else { to ++ ; } } return IntInterval . fromToBy ( from , to , to > from ? 2 : - 2 ) ; |
public class LIBORMarketModelFromCovarianceModel { /** * Creates a LIBOR Market Model for given covariance with a calibration ( if calibration items are given ) .
* < br >
* If calibrationItems in non - empty and the covariance model is a parametric model ,
* the covariance will be replaced by a calibrate version of the same model , i . e . ,
* the LIBOR Market Model will be calibrated . Note : Calibration is not lazy .
* < br >
* The map < code > properties < / code > allows to configure the model . The following keys may be used :
* < ul >
* < li >
* < code > measure < / code > : Possible values :
* < ul >
* < li >
* < code > SPOT < / code > ( < code > String < / code > ) : Simulate under spot measure .
* < / li >
* < li >
* < code > TERMINAL < / code > ( < code > String < / code > ) : Simulate under terminal measure .
* < / li >
* < / ul >
* < / li >
* < li >
* < code > stateSpace < / code > : Possible values :
* < ul >
* < li >
* < code > LOGNORMAL < / code > ( < code > String < / code > ) : Simulate < i > L = exp ( Y ) < / i > .
* < / li >
* < li >
* < code > NORMAL < / code > ( < code > String < / code > ) : Simulate < i > L = Y < / i > .
* < / li >
* < / ul >
* < / li >
* < li >
* < code > liborCap < / code > : An optional < code > Double < / code > value applied as a cap to the LIBOR rates .
* May be used to limit the simulated valued to prevent values attaining POSITIVE _ INFINITY and
* numerical problems . To disable the cap , set < code > liborCap < / code > to < code > Double . POSITIVE _ INFINITY < / code > .
* < / li >
* < li >
* < code > calibrationParameters < / code > : Possible values :
* < ul >
* < li >
* < code > Map & lt ; String , Object & gt ; < / code > a parameter map with the following key / value pairs :
* < ul >
* < li >
* < code > accuracy < / code > : < code > Double < / code > specifying the required solver accuracy .
* < / li >
* < li >
* < code > maxIterations < / code > : < code > Integer < / code > specifying the maximum iterations for the solver .
* < / li >
* < / ul >
* < / li >
* < / ul >
* < / li >
* < / ul >
* @ param liborPeriodDiscretization The discretization of the interest rate curve into forward rates ( tenor structure ) .
* @ param analyticModel The associated analytic model of this model ( containing the associated market data objects like curve ) .
* @ param forwardRateCurve The initial values for the forward rates .
* @ param discountCurve The discount curve to use . This will create an LMM model with a deterministic zero - spread discounting adjustment .
* @ param randomVariableFactory The random variable factory used to create the inital values of the model .
* @ param covarianceModel The covariance model to use .
* @ param calibrationProducts The vector of calibration items ( a union of a product , target value and weight ) for the objective function sum weight ( i ) * ( modelValue ( i ) - targetValue ( i ) .
* @ param properties Key value map specifying properties like < code > measure < / code > and < code > stateSpace < / code > .
* @ return A new instance of LIBORMarketModelFromCovarianceModel , possibly calibrated .
* @ throws net . finmath . exception . CalculationException Thrown if the valuation fails , specific cause may be available via the < code > cause ( ) < / code > method . */
public static LIBORMarketModelFromCovarianceModel of ( TimeDiscretization liborPeriodDiscretization , AnalyticModel analyticModel , ForwardCurve forwardRateCurve , DiscountCurve discountCurve , AbstractRandomVariableFactory randomVariableFactory , LIBORCovarianceModel covarianceModel , CalibrationProduct [ ] calibrationProducts , Map < String , ? > properties ) throws CalculationException { } } | LIBORMarketModelFromCovarianceModel model = new LIBORMarketModelFromCovarianceModel ( liborPeriodDiscretization , analyticModel , forwardRateCurve , discountCurve , randomVariableFactory , covarianceModel , properties ) ; // Perform calibration , if data is given
if ( calibrationProducts != null && calibrationProducts . length > 0 ) { Map < String , Object > calibrationParameters = null ; if ( properties != null && properties . containsKey ( "calibrationParameters" ) ) { calibrationParameters = ( Map < String , Object > ) properties . get ( "calibrationParameters" ) ; } LIBORCovarianceModelCalibrateable covarianceModelParametric = null ; try { covarianceModelParametric = ( LIBORCovarianceModelCalibrateable ) covarianceModel ; } catch ( Exception e ) { throw new ClassCastException ( "Calibration restricted to covariance models implementing LIBORCovarianceModelCalibrateable." ) ; } LIBORCovarianceModel covarianceModelCalibrated = covarianceModelParametric . getCloneCalibrated ( model , calibrationProducts , calibrationParameters ) ; LIBORMarketModelFromCovarianceModel modelCalibrated = model . getCloneWithModifiedCovarianceModel ( covarianceModelCalibrated ) ; return modelCalibrated ; } else { return model ; } |
public class TimeGuard { /** * Check all registered time watchers for time bound violations .
* @ see # addGuard ( java . lang . String , long )
* @ see # addGuard ( java . lang . String , long , com . igormaznitsa . meta . common . utils . TimeGuard . TimeAlertListener )
* @ since 1.0 */
@ Weight ( value = Weight . Unit . VARIABLE , comment = "Depends on the current call stack depth" ) public static void check ( ) { } } | final long time = System . currentTimeMillis ( ) ; final int stackDepth = ThreadUtils . stackDepth ( ) ; final List < TimeData > list = REGISTRY . get ( ) ; final Iterator < TimeData > iterator = list . iterator ( ) ; while ( iterator . hasNext ( ) ) { final TimeData timeWatchItem = iterator . next ( ) ; if ( timeWatchItem . getDetectedStackDepth ( ) >= stackDepth ) { final boolean timePoint = timeWatchItem . isTimePoint ( ) ; try { final long detectedDelay = time - timeWatchItem . getCreationTimeInMilliseconds ( ) ; if ( timePoint ) { try { timeWatchItem . getAlertListener ( ) . onTimeAlert ( detectedDelay , timeWatchItem ) ; } catch ( Exception ex ) { final UnexpectedProcessingError error = new UnexpectedProcessingError ( "Error during time point processing" , ex ) ; MetaErrorListeners . fireError ( error . getMessage ( ) , error ) ; } } else if ( detectedDelay > timeWatchItem . getMaxAllowedDelayInMilliseconds ( ) ) { final TimeAlertListener processor = timeWatchItem . getAlertListener ( ) ; if ( processor == NULL_TIME_ALERT_LISTENER ) { MetaErrorListeners . fireError ( "Detected time violation without defined time alert listener" , new TimeViolationError ( detectedDelay , timeWatchItem ) ) ; } else { try { processor . onTimeAlert ( detectedDelay , timeWatchItem ) ; } catch ( Exception ex ) { final UnexpectedProcessingError error = new UnexpectedProcessingError ( "Error during time alert processing" , ex ) ; MetaErrorListeners . fireError ( error . getMessage ( ) , error ) ; } } } } finally { iterator . remove ( ) ; } } } if ( list . isEmpty ( ) ) { REGISTRY . remove ( ) ; } |
public class CommerceNotificationTemplateUserSegmentRelPersistenceImpl { /** * Returns a range of all the commerce notification template user segment rels where commerceNotificationTemplateId = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceNotificationTemplateUserSegmentRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param commerceNotificationTemplateId the commerce notification template ID
* @ param start the lower bound of the range of commerce notification template user segment rels
* @ param end the upper bound of the range of commerce notification template user segment rels ( not inclusive )
* @ return the range of matching commerce notification template user segment rels */
@ Override public List < CommerceNotificationTemplateUserSegmentRel > findByCommerceNotificationTemplateId ( long commerceNotificationTemplateId , int start , int end ) { } } | return findByCommerceNotificationTemplateId ( commerceNotificationTemplateId , start , end , null ) ; |
public class Emitter { /** * Adds a LinkRef to this set of LinkRefs .
* @ param sKey
* The key / id .
* @ param aLinkRef
* The LinkRef . */
public void addLinkRef ( @ Nonnull final String sKey , final LinkRef aLinkRef ) { } } | m_aLinkRefs . put ( sKey . toLowerCase ( Locale . US ) , aLinkRef ) ; |
public class CcgGrammarUtils { /** * Constructs a distribution over binary combination rules for CCG ,
* given a set of syntactic categories . This method compiles out all
* of the possible ways to combine two adjacent CCG categories using
* function application , composition , and any other binary rules .
* @ param syntaxType
* @ param rules
* @ param allowComposition
* @ return */
public static DiscreteFactor buildUnrestrictedBinaryDistribution ( DiscreteVariable syntaxType , Iterable < CcgBinaryRule > rules , boolean allowComposition ) { } } | List < HeadedSyntacticCategory > allCategories = syntaxType . getValuesWithCast ( HeadedSyntacticCategory . class ) ; Set < List < Object > > validOutcomes = Sets . newHashSet ( ) ; Set < Combinator > combinators = Sets . newHashSet ( ) ; // Compute function application rules .
for ( HeadedSyntacticCategory functionCat : allCategories ) { for ( HeadedSyntacticCategory argumentCat : allCategories ) { appendApplicationRules ( functionCat , argumentCat , syntaxType , validOutcomes , combinators ) ; } } if ( allowComposition ) { // Compute function composition rules .
for ( HeadedSyntacticCategory functionCat : allCategories ) { for ( HeadedSyntacticCategory argumentCat : allCategories ) { appendCompositionRules ( functionCat , argumentCat , syntaxType , validOutcomes , combinators ) ; } } } appendBinaryRules ( rules , syntaxType , validOutcomes , combinators ) ; return buildSyntaxDistribution ( syntaxType , validOutcomes , combinators ) ; |
public class DataTransformProcess { /** * Consolidates transformation objects when possible . Currently only works with { @ link RemoveAttributeTransform } */
private void consolidateTransforms ( ) { } } | for ( int i = 0 ; i < learnedTransforms . size ( ) - 1 ; i ++ ) { DataTransform t1 = learnedTransforms . get ( i ) ; DataTransform t2 = learnedTransforms . get ( i + 1 ) ; if ( ! ( t1 instanceof RemoveAttributeTransform && t2 instanceof RemoveAttributeTransform ) ) continue ; // They are not both RATs
RemoveAttributeTransform r1 = ( RemoveAttributeTransform ) t1 ; RemoveAttributeTransform r2 = ( RemoveAttributeTransform ) t2 ; r2 . consolidate ( r1 ) ; learnedTransforms . remove ( i ) ; i -- ; } |
public class RunningJobImpl { /** * Inform the client of a failed job .
* @ param jobStatusProto status of the failed job */
private synchronized void onJobFailure ( final JobStatusProto jobStatusProto ) { } } | assert jobStatusProto . getState ( ) == ReefServiceProtos . State . FAILED ; final String id = this . jobId ; final Optional < byte [ ] > data = jobStatusProto . hasException ( ) ? Optional . of ( jobStatusProto . getException ( ) . toByteArray ( ) ) : Optional . < byte [ ] > empty ( ) ; final Optional < Throwable > cause = this . exceptionCodec . fromBytes ( data ) ; final String message ; if ( cause . isPresent ( ) && cause . get ( ) . getMessage ( ) != null ) { message = cause . get ( ) . getMessage ( ) ; } else { message = "No Message sent by the Job in exception " + cause . get ( ) ; LOG . log ( Level . WARNING , message , cause . get ( ) ) ; } final Optional < String > description = Optional . of ( message ) ; final FailedJob failedJob = new FailedJob ( id , message , description , cause , data ) ; this . failedJobEventHandler . onNext ( failedJob ) ; |
public class Messenger { /** * Save Group ' s permissions
* @ param gid group ' s id
* @ param adminSettings settings
* @ return promise of void */
@ NotNull @ ObjectiveCName ( "saveGroupPermissionsWithGid:withSettings:" ) public Promise < Void > saveGroupPermissions ( int gid , GroupPermissions adminSettings ) { } } | return modules . getGroupsModule ( ) . saveAdminSettings ( gid , adminSettings ) ; |
public class DefaultDirectSuperclasses { /** * this compensates for the lack of map */
public static List < Class < ? > > makeArrayClasses ( List < Class < ? > > classes , int dims ) { } } | Iterator < Class < ? > > i = classes . iterator ( ) ; LinkedList < Class < ? > > arrayClasses = new LinkedList < Class < ? > > ( ) ; while ( i . hasNext ( ) ) arrayClasses . add ( makeArrayClass ( i . next ( ) , dims ) ) ; return arrayClasses ; |
public class Bytes { /** * Store a < b > short < / b > number into a byte array in a given byte order */
public static void setShort ( int n , byte [ ] b , int off , boolean littleEndian ) { } } | if ( littleEndian ) { b [ off ] = ( byte ) n ; b [ off + 1 ] = ( byte ) ( n >>> 8 ) ; } else { b [ off ] = ( byte ) ( n >>> 8 ) ; b [ off + 1 ] = ( byte ) n ; } |
public class InternalXbaseParser { /** * InternalXbase . g : 2961:1 : ruleXSwitchExpression returns [ EObject current = null ] : ( ( ) otherlv _ 1 = ' switch ' ( ( ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) ' ) | ( ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) ) ) ) otherlv _ 10 = ' { ' ( ( lv _ cases _ 11_0 = ruleXCasePart ) ) * ( otherlv _ 12 = ' default ' otherlv _ 13 = ' : ' ( ( lv _ default _ 14_0 = ruleXExpression ) ) ) ? otherlv _ 15 = ' } ' ) ; */
public final EObject ruleXSwitchExpression ( ) throws RecognitionException { } } | EObject current = null ; Token otherlv_1 = null ; Token otherlv_2 = null ; Token otherlv_4 = null ; Token otherlv_6 = null ; Token otherlv_8 = null ; Token otherlv_10 = null ; Token otherlv_12 = null ; Token otherlv_13 = null ; Token otherlv_15 = null ; EObject lv_declaredParam_3_0 = null ; EObject lv_switch_5_0 = null ; EObject lv_declaredParam_7_0 = null ; EObject lv_switch_9_0 = null ; EObject lv_cases_11_0 = null ; EObject lv_default_14_0 = null ; enterRule ( ) ; try { // InternalXbase . g : 2967:2 : ( ( ( ) otherlv _ 1 = ' switch ' ( ( ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) ' ) | ( ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) ) ) ) otherlv _ 10 = ' { ' ( ( lv _ cases _ 11_0 = ruleXCasePart ) ) * ( otherlv _ 12 = ' default ' otherlv _ 13 = ' : ' ( ( lv _ default _ 14_0 = ruleXExpression ) ) ) ? otherlv _ 15 = ' } ' ) )
// InternalXbase . g : 2968:2 : ( ( ) otherlv _ 1 = ' switch ' ( ( ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) ' ) | ( ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) ) ) ) otherlv _ 10 = ' { ' ( ( lv _ cases _ 11_0 = ruleXCasePart ) ) * ( otherlv _ 12 = ' default ' otherlv _ 13 = ' : ' ( ( lv _ default _ 14_0 = ruleXExpression ) ) ) ? otherlv _ 15 = ' } ' )
{ // InternalXbase . g : 2968:2 : ( ( ) otherlv _ 1 = ' switch ' ( ( ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) ' ) | ( ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) ) ) ) otherlv _ 10 = ' { ' ( ( lv _ cases _ 11_0 = ruleXCasePart ) ) * ( otherlv _ 12 = ' default ' otherlv _ 13 = ' : ' ( ( lv _ default _ 14_0 = ruleXExpression ) ) ) ? otherlv _ 15 = ' } ' )
// InternalXbase . g : 2969:3 : ( ) otherlv _ 1 = ' switch ' ( ( ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) ' ) | ( ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) ) ) ) otherlv _ 10 = ' { ' ( ( lv _ cases _ 11_0 = ruleXCasePart ) ) * ( otherlv _ 12 = ' default ' otherlv _ 13 = ' : ' ( ( lv _ default _ 14_0 = ruleXExpression ) ) ) ? otherlv _ 15 = ' } '
{ // InternalXbase . g : 2969:3 : ( )
// InternalXbase . g : 2970:4:
{ if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getXSwitchExpressionAccess ( ) . getXSwitchExpressionAction_0 ( ) , current ) ; } } otherlv_1 = ( Token ) match ( input , 60 , FOLLOW_46 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getXSwitchExpressionAccess ( ) . getSwitchKeyword_1 ( ) ) ; } // InternalXbase . g : 2980:3 : ( ( ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) ' ) | ( ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) ) ) )
int alt49 = 2 ; alt49 = dfa49 . predict ( input ) ; switch ( alt49 ) { case 1 : // InternalXbase . g : 2981:4 : ( ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) ' )
{ // InternalXbase . g : 2981:4 : ( ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) ' )
// InternalXbase . g : 2982:5 : ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) ) ( ( lv _ switch _ 5_0 = ruleXExpression ) ) otherlv _ 6 = ' ) '
{ // InternalXbase . g : 2982:5 : ( ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' ) )
// InternalXbase . g : 2983:6 : ( ( ' ( ' ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' )
{ // InternalXbase . g : 2993:6 : ( otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : ' )
// InternalXbase . g : 2994:7 : otherlv _ 2 = ' ( ' ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) ) otherlv _ 4 = ' : '
{ otherlv_2 = ( Token ) match ( input , 49 , FOLLOW_13 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getXSwitchExpressionAccess ( ) . getLeftParenthesisKeyword_2_0_0_0_0 ( ) ) ; } // InternalXbase . g : 2998:7 : ( ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter ) )
// InternalXbase . g : 2999:8 : ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter )
{ // InternalXbase . g : 2999:8 : ( lv _ declaredParam _ 3_0 = ruleJvmFormalParameter )
// InternalXbase . g : 3000:9 : lv _ declaredParam _ 3_0 = ruleJvmFormalParameter
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXSwitchExpressionAccess ( ) . getDeclaredParamJvmFormalParameterParserRuleCall_2_0_0_0_1_0 ( ) ) ; } pushFollow ( FOLLOW_47 ) ; lv_declaredParam_3_0 = ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXSwitchExpressionRule ( ) ) ; } set ( current , "declaredParam" , lv_declaredParam_3_0 , "org.eclipse.xtext.xbase.Xbase.JvmFormalParameter" ) ; afterParserOrEnumRuleCall ( ) ; } } } otherlv_4 = ( Token ) match ( input , 61 , FOLLOW_4 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_4 , grammarAccess . getXSwitchExpressionAccess ( ) . getColonKeyword_2_0_0_0_2 ( ) ) ; } } } // InternalXbase . g : 3023:5 : ( ( lv _ switch _ 5_0 = ruleXExpression ) )
// InternalXbase . g : 3024:6 : ( lv _ switch _ 5_0 = ruleXExpression )
{ // InternalXbase . g : 3024:6 : ( lv _ switch _ 5_0 = ruleXExpression )
// InternalXbase . g : 3025:7 : lv _ switch _ 5_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXSwitchExpressionAccess ( ) . getSwitchXExpressionParserRuleCall_2_0_1_0 ( ) ) ; } pushFollow ( FOLLOW_29 ) ; lv_switch_5_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXSwitchExpressionRule ( ) ) ; } set ( current , "switch" , lv_switch_5_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } otherlv_6 = ( Token ) match ( input , 50 , FOLLOW_32 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_6 , grammarAccess . getXSwitchExpressionAccess ( ) . getRightParenthesisKeyword_2_0_2 ( ) ) ; } } } break ; case 2 : // InternalXbase . g : 3048:4 : ( ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) ) )
{ // InternalXbase . g : 3048:4 : ( ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) ) )
// InternalXbase . g : 3049:5 : ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ? ( ( lv _ switch _ 9_0 = ruleXExpression ) )
{ // InternalXbase . g : 3049:5 : ( ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' ) ) ?
int alt48 = 2 ; alt48 = dfa48 . predict ( input ) ; switch ( alt48 ) { case 1 : // InternalXbase . g : 3050:6 : ( ( ( ( ruleJvmFormalParameter ) ) ' : ' ) ) = > ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' )
{ // InternalXbase . g : 3059:6 : ( ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : ' )
// InternalXbase . g : 3060:7 : ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) ) otherlv _ 8 = ' : '
{ // InternalXbase . g : 3060:7 : ( ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter ) )
// InternalXbase . g : 3061:8 : ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter )
{ // InternalXbase . g : 3061:8 : ( lv _ declaredParam _ 7_0 = ruleJvmFormalParameter )
// InternalXbase . g : 3062:9 : lv _ declaredParam _ 7_0 = ruleJvmFormalParameter
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXSwitchExpressionAccess ( ) . getDeclaredParamJvmFormalParameterParserRuleCall_2_1_0_0_0_0 ( ) ) ; } pushFollow ( FOLLOW_47 ) ; lv_declaredParam_7_0 = ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXSwitchExpressionRule ( ) ) ; } set ( current , "declaredParam" , lv_declaredParam_7_0 , "org.eclipse.xtext.xbase.Xbase.JvmFormalParameter" ) ; afterParserOrEnumRuleCall ( ) ; } } } otherlv_8 = ( Token ) match ( input , 61 , FOLLOW_4 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_8 , grammarAccess . getXSwitchExpressionAccess ( ) . getColonKeyword_2_1_0_0_1 ( ) ) ; } } } break ; } // InternalXbase . g : 3085:5 : ( ( lv _ switch _ 9_0 = ruleXExpression ) )
// InternalXbase . g : 3086:6 : ( lv _ switch _ 9_0 = ruleXExpression )
{ // InternalXbase . g : 3086:6 : ( lv _ switch _ 9_0 = ruleXExpression )
// InternalXbase . g : 3087:7 : lv _ switch _ 9_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXSwitchExpressionAccess ( ) . getSwitchXExpressionParserRuleCall_2_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_32 ) ; lv_switch_9_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXSwitchExpressionRule ( ) ) ; } set ( current , "switch" , lv_switch_9_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } break ; } otherlv_10 = ( Token ) match ( input , 52 , FOLLOW_48 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_10 , grammarAccess . getXSwitchExpressionAccess ( ) . getLeftCurlyBracketKeyword_3 ( ) ) ; } // InternalXbase . g : 3110:3 : ( ( lv _ cases _ 11_0 = ruleXCasePart ) ) *
loop50 : do { int alt50 = 2 ; int LA50_0 = input . LA ( 1 ) ; if ( ( LA50_0 == RULE_ID || LA50_0 == 32 || ( LA50_0 >= 48 && LA50_0 <= 49 ) || LA50_0 == 61 || LA50_0 == 63 ) ) { alt50 = 1 ; } switch ( alt50 ) { case 1 : // InternalXbase . g : 3111:4 : ( lv _ cases _ 11_0 = ruleXCasePart )
{ // InternalXbase . g : 3111:4 : ( lv _ cases _ 11_0 = ruleXCasePart )
// InternalXbase . g : 3112:5 : lv _ cases _ 11_0 = ruleXCasePart
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXSwitchExpressionAccess ( ) . getCasesXCasePartParserRuleCall_4_0 ( ) ) ; } pushFollow ( FOLLOW_48 ) ; lv_cases_11_0 = ruleXCasePart ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXSwitchExpressionRule ( ) ) ; } add ( current , "cases" , lv_cases_11_0 , "org.eclipse.xtext.xbase.Xbase.XCasePart" ) ; afterParserOrEnumRuleCall ( ) ; } } } break ; default : break loop50 ; } } while ( true ) ; // InternalXbase . g : 3129:3 : ( otherlv _ 12 = ' default ' otherlv _ 13 = ' : ' ( ( lv _ default _ 14_0 = ruleXExpression ) ) ) ?
int alt51 = 2 ; int LA51_0 = input . LA ( 1 ) ; if ( ( LA51_0 == 62 ) ) { alt51 = 1 ; } switch ( alt51 ) { case 1 : // InternalXbase . g : 3130:4 : otherlv _ 12 = ' default ' otherlv _ 13 = ' : ' ( ( lv _ default _ 14_0 = ruleXExpression ) )
{ otherlv_12 = ( Token ) match ( input , 62 , FOLLOW_47 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_12 , grammarAccess . getXSwitchExpressionAccess ( ) . getDefaultKeyword_5_0 ( ) ) ; } otherlv_13 = ( Token ) match ( input , 61 , FOLLOW_4 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_13 , grammarAccess . getXSwitchExpressionAccess ( ) . getColonKeyword_5_1 ( ) ) ; } // InternalXbase . g : 3138:4 : ( ( lv _ default _ 14_0 = ruleXExpression ) )
// InternalXbase . g : 3139:5 : ( lv _ default _ 14_0 = ruleXExpression )
{ // InternalXbase . g : 3139:5 : ( lv _ default _ 14_0 = ruleXExpression )
// InternalXbase . g : 3140:6 : lv _ default _ 14_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXSwitchExpressionAccess ( ) . getDefaultXExpressionParserRuleCall_5_2_0 ( ) ) ; } pushFollow ( FOLLOW_49 ) ; lv_default_14_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXSwitchExpressionRule ( ) ) ; } set ( current , "default" , lv_default_14_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; } otherlv_15 = ( Token ) match ( input , 53 , FOLLOW_2 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_15 , grammarAccess . getXSwitchExpressionAccess ( ) . getRightCurlyBracketKeyword_6 ( ) ) ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EEnum getIfcStructuralCurveActivityTypeEnum ( ) { } } | if ( ifcStructuralCurveActivityTypeEnumEEnum == null ) { ifcStructuralCurveActivityTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1073 ) ; } return ifcStructuralCurveActivityTypeEnumEEnum ; |
public class NOPCSImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } } | switch ( featureID ) { case AfplibPackage . NOPCS__IGNDATA : return getIGNDATA ( ) ; } return super . eGet ( featureID , resolve , coreType ) ; |
public class PgResultSet { /** * # endif */
@ Override public Timestamp getTimestamp ( int i , java . util . Calendar cal ) throws SQLException { } } | checkResultSet ( i ) ; if ( wasNullFlag ) { return null ; } if ( cal == null ) { cal = getDefaultCalendar ( ) ; } int col = i - 1 ; int oid = fields [ col ] . getOID ( ) ; if ( isBinary ( i ) ) { if ( oid == Oid . TIMESTAMPTZ || oid == Oid . TIMESTAMP ) { boolean hasTimeZone = oid == Oid . TIMESTAMPTZ ; TimeZone tz = cal . getTimeZone ( ) ; return connection . getTimestampUtils ( ) . toTimestampBin ( tz , thisRow [ col ] , hasTimeZone ) ; } else { // JDBC spec says getTimestamp of Time and Date must be supported
long millis ; if ( oid == Oid . TIME || oid == Oid . TIMETZ ) { millis = getTime ( i , cal ) . getTime ( ) ; } else if ( oid == Oid . DATE ) { millis = getDate ( i , cal ) . getTime ( ) ; } else { throw new PSQLException ( GT . tr ( "Cannot convert the column of type {0} to requested type {1}." , Oid . toString ( oid ) , "timestamp" ) , PSQLState . DATA_TYPE_MISMATCH ) ; } return new Timestamp ( millis ) ; } } // If this is actually a timestamptz , the server - provided timezone will override
// the one we pass in , which is the desired behaviour . Otherwise , we ' ll
// interpret the timezone - less value in the provided timezone .
String string = getString ( i ) ; if ( oid == Oid . TIME || oid == Oid . TIMETZ ) { // If server sends us a TIME , we ensure java counterpart has date of 1970-01-01
return new Timestamp ( connection . getTimestampUtils ( ) . toTime ( cal , string ) . getTime ( ) ) ; } return connection . getTimestampUtils ( ) . toTimestamp ( cal , string ) ; |
public class SwingGui { /** * Returns the { @ link FileWindow } for the given URL . */
FileWindow getFileWindow ( String url ) { } } | if ( url == null || url . equals ( "<stdin>" ) ) { return null ; } return fileWindows . get ( url ) ; |
public class ISUPMessageFactoryImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . isup . ISUPMessageFactory # createUBA ( int ) */
@ Override public UnblockingAckMessage createUBA ( int cic ) { } } | UnblockingAckMessage msg = createUBA ( ) ; CircuitIdentificationCode code = this . parameterFactory . createCircuitIdentificationCode ( ) ; code . setCIC ( cic ) ; msg . setCircuitIdentificationCode ( code ) ; return msg ; |
public class MessageSetImpl { /** * Gets messages in the given channel around a given message in any channel until one that meets the given
* condition is found . If no message matches the condition , an empty set is returned .
* The given message will be part of the result in addition to the messages around if it was sent in the given
* channel and is matched against the condition and will abort retrieval .
* Half of the messages will be older than the given message and half of the messages will be newer .
* If there aren ' t enough older or newer messages , the halves will not be same - sized .
* It ' s also not guaranteed to be perfectly balanced .
* @ param channel The channel of the messages .
* @ param condition The abort condition for when to stop retrieving messages .
* @ param around Get messages around the message with this id .
* @ return The messages . */
public static CompletableFuture < MessageSet > getMessagesAroundUntil ( TextChannel channel , Predicate < Message > condition , long around ) { } } | CompletableFuture < MessageSet > future = new CompletableFuture < > ( ) ; channel . getApi ( ) . getThreadPool ( ) . getExecutorService ( ) . submit ( ( ) -> { try { List < Message > messages = new ArrayList < > ( ) ; Optional < Message > untilMessage = getMessagesAroundAsStream ( channel , around ) . peek ( messages :: add ) . filter ( condition ) . findFirst ( ) ; future . complete ( new MessageSetImpl ( untilMessage . map ( message -> messages ) . orElse ( Collections . emptyList ( ) ) ) ) ; } catch ( Throwable t ) { future . completeExceptionally ( t ) ; } } ) ; return future ; |
public class Jenkins { /** * Gets the { @ link Jenkins } singleton .
* @ return { @ link Jenkins } instance
* @ throws IllegalStateException for the reasons that { @ link # getInstanceOrNull } might return null
* @ since 2.98 */
@ Nonnull public static Jenkins get ( ) throws IllegalStateException { } } | Jenkins instance = getInstanceOrNull ( ) ; if ( instance == null ) { throw new IllegalStateException ( "Jenkins.instance is missing. Read the documentation of Jenkins.getInstanceOrNull to see what you are doing wrong." ) ; } return instance ; |
public class Builder { /** * Use custom serialization / deserialization to store and retrieve objects from disk cache .
* @ param maxDiskSizeBytes is the max size of disk in bytes which an be used by the disk cache
* layer .
* @ param usePrivateFiles is true if you want to use { @ link Context # MODE _ PRIVATE } with the
* default disk cache folder .
* @ param serializer provides serialization / deserialization methods for the disk cache
* layer .
* @ param context is used to access file system .
* @ return the builder . */
public Builder < T > useSerializerInDisk ( int maxDiskSizeBytes , boolean usePrivateFiles , CacheSerializer < T > serializer , Context context ) { } } | File folder = getDefaultDiskCacheFolder ( usePrivateFiles , context ) ; return useSerializerInDisk ( maxDiskSizeBytes , folder , serializer ) ; |
public class MaxThreadTrackerService { /** * Not needed until workers support dynamic removal ( currently reload required ) */
void unregisterWorkerMax ( String name ) { } } | synchronized ( workers ) { Integer val = workers . remove ( name ) ; if ( val != null ) { total -= val ; } } |
public class ExpressionParser { /** * It reads an expression from a reader and fill a tree
* @ param reader the reader to be used as the character source , must not be null
* @ param tree the result tree to be filled by read items , must not be null
* @ param context a preprocessor context to be used for variables
* @ param insideBracket the flag shows that the expression can be ended by a bracket
* @ param argument the flag shows that the expression can be ended by a comma
* @ return the last read expression item ( a comma or a bracket for instance ) , it can be null
* @ throws IOException it will be thrown if there is a problem in reading from the reader */
@ Nullable public ExpressionItem readExpression ( @ Nonnull final PushbackReader reader , @ Nonnull final ExpressionTree tree , @ Nonnull final PreprocessorContext context , final boolean insideBracket , final boolean argument ) throws IOException { } } | boolean working = true ; ExpressionItem result = null ; final FilePositionInfo [ ] stack ; final String sourceLine ; final PreprocessingState state = context . getPreprocessingState ( ) ; stack = state . makeIncludeStack ( ) ; sourceLine = state . getLastReadString ( ) ; ExpressionItem prev = null ; while ( working ) { final ExpressionItem nextItem = nextItem ( reader , context ) ; if ( nextItem == null ) { working = false ; result = null ; } else if ( nextItem . getExpressionItemType ( ) == ExpressionItemType . SPECIAL ) { if ( nextItem == SpecialItem . BRACKET_CLOSING ) { if ( insideBracket ) { working = false ; result = nextItem ; } else if ( argument ) { working = false ; result = nextItem ; } else { final String text = "Detected alone closing bracket" ; throw context . makeException ( "Detected alone closing bracket" , null ) ; } } else if ( nextItem == SpecialItem . BRACKET_OPENING ) { if ( prev != null && prev . getExpressionItemType ( ) == ExpressionItemType . VARIABLE ) { final String text = "Unknown function detected [" + prev . toString ( ) + ']' ; throw context . makeException ( text , null ) ; } final ExpressionTree subExpression ; subExpression = new ExpressionTree ( stack , sourceLine ) ; if ( SpecialItem . BRACKET_CLOSING != readExpression ( reader , subExpression , context , true , false ) ) { final String text = "Detected unclosed bracket" ; throw context . makeException ( text , null ) ; } tree . addTree ( subExpression ) ; } else if ( nextItem == SpecialItem . COMMA ) { return nextItem ; } } else if ( nextItem . getExpressionItemType ( ) == ExpressionItemType . 
FUNCTION ) { final AbstractFunction function = ( AbstractFunction ) nextItem ; ExpressionTree functionTree = readFunction ( function , reader , context , stack , sourceLine ) ; tree . addTree ( functionTree ) ; } else { tree . addItem ( nextItem ) ; } prev = nextItem ; } return result ; |
public class Application { /** * Get the connection to the server for this applet .
* Optionally create the server connection .
* @ param localTaskOwner The task that will own this remote task ( or application ) server ) [ If null , get the app server ] .
* @ param strUserID The user id ( or name ) to initialize the server ' s application to .
* @ param bCreateIfNotFound If the server is null , initialize the server .
* @ return The server object ( application defined ) . */
public RemoteTask getRemoteTask ( Task localTaskOwner , String strUserID , String strPassword , boolean bCreateIfNotFound ) { } } | if ( localTaskOwner == null ) localTaskOwner = m_taskMain ; // No task = main task
RemoteTask server = m_mapTasks . get ( localTaskOwner ) ; if ( server == null ) if ( bCreateIfNotFound ) { String strServer = this . getAppServerName ( ) ; String strRemoteApp = null ; if ( localTaskOwner != null ) strRemoteApp = localTaskOwner . getProperty ( Params . REMOTE_APP_NAME ) ; if ( ( strRemoteApp == null ) || ( strRemoteApp . length ( ) == 0 ) ) strRemoteApp = this . getProperty ( Params . REMOTE_APP_NAME ) ; if ( ( strRemoteApp == null ) || ( strRemoteApp . length ( ) == 0 ) ) strRemoteApp = Params . DEFAULT_REMOTE_APP ; if ( strUserID == null ) strUserID = this . getProperty ( Params . USER_ID ) ; if ( strPassword == null ) strPassword = this . getProperty ( Params . PASSWORD ) ; server = this . createRemoteTask ( strServer , strRemoteApp , strUserID , strPassword ) ; if ( server != null ) { if ( localTaskOwner == null ) localTaskOwner = new AutoTask ( this , null , null ) ; // Default task = server task
this . addTask ( localTaskOwner , server ) ; // NOTE : IF autotask was just created , I re - add this with a remote pointer
} } return server ; |
public class HexUtil { /** * Creates a byte array from a CharSequence ( String , StringBuilder , etc . )
* containing only valid hexidecimal formatted characters .
* Each grouping of 2 characters represent a byte in " Big Endian " format .
* The hex CharSequence must be an even length of characters . For example , a String
* of " 1234 " would return the byte array { 0x12 , 0x34 } .
* @ param hexString The String , StringBuilder , etc . that contains the
* sequence of hexidecimal character values .
* @ param offset The offset within the sequence to start from . If the offset
* is invalid , will cause an IllegalArgumentException .
* @ param length The length from the offset to convert . If the length
* is invalid , will cause an IllegalArgumentException .
* @ return A new byte array representing the sequence of bytes created from
* the sequence of hexidecimal characters . If the hexString is null ,
* then this method will return null . */
public static byte [ ] toByteArray ( CharSequence hexString , int offset , int length ) { } } | if ( hexString == null ) { return null ; } assertOffsetLengthValid ( offset , length , hexString . length ( ) ) ; // a hex string must be in increments of 2
if ( ( length % 2 ) != 0 ) { throw new IllegalArgumentException ( "The hex string did not contain an even number of characters [actual=" + length + "]" ) ; } // convert hex string to byte array
byte [ ] bytes = new byte [ length / 2 ] ; int j = 0 ; int end = offset + length ; for ( int i = offset ; i < end ; i += 2 ) { int highNibble = hexCharToIntValue ( hexString . charAt ( i ) ) ; int lowNibble = hexCharToIntValue ( hexString . charAt ( i + 1 ) ) ; bytes [ j ++ ] = ( byte ) ( ( ( highNibble << 4 ) & 0xF0 ) | ( lowNibble & 0x0F ) ) ; } return bytes ; |
public class MeetingApplicationConfiguration { /** * Persistence Manager factory . This determines your database connection . This would have the same usage if
* you were connecting to an embedded or remote database . The only difference would be the factory type .
* @ return Initialized Persistence Manager Factory */
@ Bean protected PersistenceManagerFactory persistenceManagerFactory ( ) throws InitializationException { } } | CacheManagerFactory cacheManagerFactory = new CacheManagerFactory ( ) ; cacheManagerFactory . initialize ( ) ; return cacheManagerFactory ; |
public class Sendout { /** * Returns the entity with the required fields for an insert set .
* @ return */
public Sendout instantiateForInsert ( ) { } } | Sendout entity = new Sendout ( ) ; entity . setIsRead ( Boolean . FALSE ) ; entity . setCandidate ( new Candidate ( 1 ) ) ; entity . setUser ( new CorporateUser ( 1 ) ) ; return entity ; |
public class RythmEngine { /** * Constructors , Configuration and Initializing */
private void _initLogger ( Map < String , ? > conf ) { } } | boolean logEnabled = ( Boolean ) RythmConfigurationKey . LOG_ENABLED . getConfiguration ( conf ) ; if ( logEnabled ) { ILoggerFactory factory = RythmConfigurationKey . LOG_FACTORY_IMPL . getConfiguration ( conf ) ; Logger . registerLoggerFactory ( factory ) ; } else { Logger . registerLoggerFactory ( new NullLogger . Factory ( ) ) ; } |
public class Transaction { /** * Opens a connection for the specified execution .
* @ param event the event causing the open
* @ throws SQLException
* @ throws PersistenceException */
private synchronized void open ( Execution event , String dsn ) throws SQLException , PersistenceException { } } | try { Connection conn ; if ( connection != null ) { return ; } state = "OPENING" ; try { InitialContext ctx = new InitialContext ( ) ; DataSource ds ; if ( dsn == null ) { dsn = event . getDataSource ( ) ; } if ( dsn == null ) { throw new PersistenceException ( "No data source name" ) ; } state = "LOOKING UP" ; ds = dsCache . get ( dsn ) ; if ( ds == null ) { ds = ( DataSource ) ctx . lookup ( dsn ) ; if ( ds != null ) { dsCache . put ( dsn , ds ) ; } } if ( ds == null ) { throw new PersistenceException ( "Could not find data source: " + dsn ) ; } conn = ds . getConnection ( ) ; openTime = System . currentTimeMillis ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "DPTRANSID-" + transactionId + " connection.get - dsn='" + dsn + '\'' ) ; } state = "CONNECTED" ; } catch ( NamingException e ) { logger . error ( "Problem with datasource: " + e . getMessage ( ) ) ; throw new PersistenceException ( e . getMessage ( ) ) ; } conn . setAutoCommit ( false ) ; conn . setReadOnly ( readOnly ) ; connection = conn ; if ( tracking ) { connections . incrementAndGet ( ) ; transactions . put ( new Integer ( transactionId ) , this ) ; } } finally { } |
public class NameService { /** * This method will return the event listener < code > ArrayList < / code > when called . The event listener is
* lazily created .
* @ return The listener array list . */
private ArrayList getListener ( ) { } } | if ( _listeners != null ) return _listeners ; synchronized ( this ) { if ( _listeners != null ) return _listeners ; _listeners = new ArrayList ( ) ; } return _listeners ; |
public class MultipleOutputs { /** * Checks if a named output is alreadyDefined or not .
* @ param conf job conf
* @ param namedOutput named output names
* @ param alreadyDefined whether the existence / non - existence of
* the named output is to be checked
* @ throws IllegalArgumentException if the output name is alreadyDefined or
* not depending on the value of the
* ' alreadyDefined ' parameter */
private static void checkNamedOutput ( JobConf conf , String namedOutput , boolean alreadyDefined ) { } } | List < String > definedChannels = getNamedOutputsList ( conf ) ; if ( alreadyDefined && definedChannels . contains ( namedOutput ) ) { throw new IllegalArgumentException ( "Named output '" + namedOutput + "' already alreadyDefined" ) ; } else if ( ! alreadyDefined && ! definedChannels . contains ( namedOutput ) ) { throw new IllegalArgumentException ( "Named output '" + namedOutput + "' not defined" ) ; } |
public class SearchableTextComponent { /** * Find the next appearance of the search panel query */
void doFindNext ( ) { } } | String query = searchPanel . getQuery ( ) ; if ( query . isEmpty ( ) ) { return ; } String text = getDocumentText ( ) ; boolean ignoreCase = ! searchPanel . isCaseSensitive ( ) ; int caretPosition = textComponent . getCaretPosition ( ) ; int textLength = text . length ( ) ; int newCaretPosition = ( caretPosition + 1 ) % textLength ; Point match = JTextComponents . findNext ( text , query , newCaretPosition , ignoreCase ) ; if ( match == null ) { match = JTextComponents . findNext ( text , query , 0 , ignoreCase ) ; } handleMatch ( match ) ; |
public class TransportClientFactory {
    /**
     * Creates a completely new {@link TransportClient} to the remote address.
     *
     * <p>Builds a Netty {@link Bootstrap}, connects with the configured
     * timeout, then runs all registered client bootstraps synchronously
     * before handing the client back. On any bootstrap failure the client is
     * closed and the exception is propagated.
     *
     * @param address remote endpoint to connect to
     * @return a fully bootstrapped, connected client
     * @throws IOException on connect timeout, connect failure, or (wrapped)
     *         bootstrap failure
     * @throws InterruptedException if interrupted while waiting to connect
     */
    private TransportClient createClient(InetSocketAddress address) throws IOException, InterruptedException {
        logger.debug("Creating new connection to {}", address);
        Bootstrap bootstrap = new Bootstrap();
        bootstrap.group(workerGroup).channel(socketChannelClass)
            // Disable Nagle's Algorithm since we don't want packets to wait
            .option(ChannelOption.TCP_NODELAY, true)
            .option(ChannelOption.SO_KEEPALIVE, true)
            .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, conf.connectionTimeoutMs())
            .option(ChannelOption.ALLOCATOR, pooledAllocator);
        // Socket buffer sizes are applied only when explicitly configured (> 0).
        if (conf.receiveBuf() > 0) {
            bootstrap.option(ChannelOption.SO_RCVBUF, conf.receiveBuf());
        }
        if (conf.sendBuf() > 0) {
            bootstrap.option(ChannelOption.SO_SNDBUF, conf.sendBuf());
        }
        // The initializer runs on the Netty I/O thread; AtomicReferences hand
        // the created client/channel back to this (calling) thread.
        final AtomicReference<TransportClient> clientRef = new AtomicReference<>();
        final AtomicReference<Channel> channelRef = new AtomicReference<>();
        bootstrap.handler(new ChannelInitializer<SocketChannel>() {
            @Override
            public void initChannel(SocketChannel ch) {
                TransportChannelHandler clientHandler = context.initializePipeline(ch);
                clientRef.set(clientHandler.getClient());
                channelRef.set(ch);
            }
        });
        // Connect to the remote server
        long preConnect = System.nanoTime();
        ChannelFuture cf = bootstrap.connect(address);
        if (!cf.await(conf.connectionTimeoutMs())) {
            throw new IOException(
                String.format("Connecting to %s timed out (%s ms)", address, conf.connectionTimeoutMs()));
        } else if (cf.cause() != null) {
            throw new IOException(String.format("Failed to connect to %s", address), cf.cause());
        }
        TransportClient client = clientRef.get();
        Channel channel = channelRef.get();
        // The initializer must have run by the time await() reports success.
        assert client != null : "Channel future completed successfully with null client";
        // Execute any client bootstraps synchronously before marking the Client as successful.
        long preBootstrap = System.nanoTime();
        logger.debug("Connection to {} successful, running bootstraps...", address);
        try {
            for (TransportClientBootstrap clientBootstrap : clientBootstraps) {
                clientBootstrap.doBootstrap(client, channel);
            }
        } catch (Exception e) { // catch non-RuntimeExceptions too as bootstrap may be written in Scala
            long bootstrapTimeMs = (System.nanoTime() - preBootstrap) / 1000000;
            logger.error("Exception while bootstrapping client after " + bootstrapTimeMs + " ms", e);
            // A half-bootstrapped client must not leak; close before rethrowing.
            client.close();
            throw Throwables.propagate(e);
        }
        long postBootstrap = System.nanoTime();
        logger.info("Successfully created connection to {} after {} ms ({} ms spent in bootstraps)",
            address, (postBootstrap - preConnect) / 1000000, (postBootstrap - preBootstrap) / 1000000);
        return client;
    }
}
public class DefaultDiskStorage { /** * Gets the directory to use to store the given key
* @ param resourceId the id of the file we ' re going to store
* @ return the directory to store the file in */
private String getSubdirectoryPath ( String resourceId ) { } } | String subdirectory = String . valueOf ( Math . abs ( resourceId . hashCode ( ) % SHARDING_BUCKET_COUNT ) ) ; return mVersionDirectory + File . separator + subdirectory ; |
public class MaterialRange {
    /**
     * Registers a {@link ChangeHandler} to be notified when the user changes
     * the slider. The handler fires only when the user releases the mouse at
     * the end of the slide operation.
     *
     * @param handler the handler to register
     * @return a registration that can be used to remove the handler
     */
    @Override
    public HandlerRegistration addChangeHandler(final ChangeHandler handler) {
        // Delegate to the underlying range <input> element so the native DOM
        // "change" event drives the handler.
        return getRangeInputElement().addDomHandler(handler, ChangeEvent.getType());
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.