idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
143,700
private void processResources ( ) throws SQLException { List < Row > rows = getRows ( "select * from zresource where zproject=? order by zorderinproject" , m_projectID ) ; for ( Row row : rows ) { Resource resource = m_project . addResource ( ) ; resource . setUniqueID ( row . getInteger ( "Z_PK" ) ) ; resource . setEmailAddress ( row . getString ( "ZEMAIL" ) ) ; resource . setInitials ( row . getString ( "ZINITIALS" ) ) ; resource . setName ( row . getString ( "ZTITLE_" ) ) ; resource . setGUID ( row . getUUID ( "ZUNIQUEID" ) ) ; resource . setType ( row . getResourceType ( "ZTYPE" ) ) ; resource . setMaterialLabel ( row . getString ( "ZMATERIALUNIT" ) ) ; if ( resource . getType ( ) == ResourceType . WORK ) { resource . setMaxUnits ( Double . valueOf ( NumberHelper . getDouble ( row . getDouble ( "ZAVAILABLEUNITS_" ) ) * 100.0 ) ) ; } Integer calendarID = row . getInteger ( "ZRESOURCECALENDAR" ) ; if ( calendarID != null ) { ProjectCalendar calendar = m_project . getCalendarByUniqueID ( calendarID ) ; if ( calendar != null ) { calendar . setName ( resource . getName ( ) ) ; resource . setResourceCalendar ( calendar ) ; } } m_eventManager . fireResourceReadEvent ( resource ) ; } }
Read resource data .
361
4
143,701
private void processTasks ( ) throws SQLException { // // Yes... we could probably read this in one query in the right order // using a CTE... but life's too short. // List < Row > rows = getRows ( "select * from zscheduleitem where zproject=? and zparentactivity_ is null and z_ent=? order by zorderinparentactivity" , m_projectID , m_entityMap . get ( "Activity" ) ) ; for ( Row row : rows ) { Task task = m_project . addTask ( ) ; populateTask ( row , task ) ; processChildTasks ( task ) ; } }
Read all top level tasks .
143
6
143,702
private void processChildTasks ( Task parentTask ) throws SQLException { List < Row > rows = getRows ( "select * from zscheduleitem where zparentactivity_=? and z_ent=? order by zorderinparentactivity" , parentTask . getUniqueID ( ) , m_entityMap . get ( "Activity" ) ) ; for ( Row row : rows ) { Task task = parentTask . addTask ( ) ; populateTask ( row , task ) ; processChildTasks ( task ) ; } }
Read all child tasks for a given parent .
116
9
143,703
private void populateTask ( Row row , Task task ) { task . setUniqueID ( row . getInteger ( "Z_PK" ) ) ; task . setName ( row . getString ( "ZTITLE" ) ) ; task . setPriority ( Priority . getInstance ( row . getInt ( "ZPRIORITY" ) ) ) ; task . setMilestone ( row . getBoolean ( "ZISMILESTONE" ) ) ; task . setActualFinish ( row . getTimestamp ( "ZGIVENACTUALENDDATE_" ) ) ; task . setActualStart ( row . getTimestamp ( "ZGIVENACTUALSTARTDATE_" ) ) ; task . setNotes ( row . getString ( "ZOBJECTDESCRIPTION" ) ) ; task . setDuration ( row . getDuration ( "ZGIVENDURATION_" ) ) ; task . setOvertimeWork ( row . getWork ( "ZGIVENWORKOVERTIME_" ) ) ; task . setWork ( row . getWork ( "ZGIVENWORK_" ) ) ; task . setLevelingDelay ( row . getDuration ( "ZLEVELINGDELAY_" ) ) ; task . setActualOvertimeWork ( row . getWork ( "ZGIVENACTUALWORKOVERTIME_" ) ) ; task . setActualWork ( row . getWork ( "ZGIVENACTUALWORK_" ) ) ; task . setRemainingWork ( row . getWork ( "ZGIVENACTUALWORK_" ) ) ; task . setGUID ( row . getUUID ( "ZUNIQUEID" ) ) ; Integer calendarID = row . getInteger ( "ZGIVENCALENDAR" ) ; if ( calendarID != null ) { ProjectCalendar calendar = m_project . getCalendarByUniqueID ( calendarID ) ; if ( calendar != null ) { task . setCalendar ( calendar ) ; } } populateConstraints ( row , task ) ; // Percent complete is calculated bottom up from assignments and actual work vs. planned work m_eventManager . fireTaskReadEvent ( task ) ; }
Read data for an individual task .
479
7
143,704
private void populateConstraints ( Row row , Task task ) { Date endDateMax = row . getTimestamp ( "ZGIVENENDDATEMAX_" ) ; Date endDateMin = row . getTimestamp ( "ZGIVENENDDATEMIN_" ) ; Date startDateMax = row . getTimestamp ( "ZGIVENSTARTDATEMAX_" ) ; Date startDateMin = row . getTimestamp ( "ZGIVENSTARTDATEMIN_" ) ; ConstraintType constraintType = null ; Date constraintDate = null ; if ( endDateMax != null ) { constraintType = ConstraintType . FINISH_NO_LATER_THAN ; constraintDate = endDateMax ; } if ( endDateMin != null ) { constraintType = ConstraintType . FINISH_NO_EARLIER_THAN ; constraintDate = endDateMin ; } if ( endDateMin != null && endDateMin == endDateMax ) { constraintType = ConstraintType . MUST_FINISH_ON ; constraintDate = endDateMin ; } if ( startDateMax != null ) { constraintType = ConstraintType . START_NO_LATER_THAN ; constraintDate = startDateMax ; } if ( startDateMin != null ) { constraintType = ConstraintType . START_NO_EARLIER_THAN ; constraintDate = startDateMin ; } if ( startDateMin != null && startDateMin == endDateMax ) { constraintType = ConstraintType . MUST_START_ON ; constraintDate = endDateMin ; } task . setConstraintType ( constraintType ) ; task . setConstraintDate ( constraintDate ) ; }
Populate the constraint type and constraint date . Note that Merlin allows both start and end constraints simultaneously . As we can t have both we ll prefer the start constraint .
380
33
143,705
private void processAssignments ( ) throws SQLException { List < Row > rows = getRows ( "select * from zscheduleitem where zproject=? and z_ent=? order by zorderinactivity" , m_projectID , m_entityMap . get ( "Assignment" ) ) ; for ( Row row : rows ) { Task task = m_project . getTaskByUniqueID ( row . getInteger ( "ZACTIVITY_" ) ) ; Resource resource = m_project . getResourceByUniqueID ( row . getInteger ( "ZRESOURCE" ) ) ; if ( task != null && resource != null ) { ResourceAssignment assignment = task . addResourceAssignment ( resource ) ; assignment . setGUID ( row . getUUID ( "ZUNIQUEID" ) ) ; assignment . setActualFinish ( row . getTimestamp ( "ZGIVENACTUALENDDATE_" ) ) ; assignment . setActualStart ( row . getTimestamp ( "ZGIVENACTUALSTARTDATE_" ) ) ; assignment . setWork ( assignmentDuration ( task , row . getWork ( "ZGIVENWORK_" ) ) ) ; assignment . setOvertimeWork ( assignmentDuration ( task , row . getWork ( "ZGIVENWORKOVERTIME_" ) ) ) ; assignment . setActualWork ( assignmentDuration ( task , row . getWork ( "ZGIVENACTUALWORK_" ) ) ) ; assignment . setActualOvertimeWork ( assignmentDuration ( task , row . getWork ( "ZGIVENACTUALWORKOVERTIME_" ) ) ) ; assignment . setRemainingWork ( assignmentDuration ( task , row . getWork ( "ZGIVENREMAININGWORK_" ) ) ) ; assignment . setLevelingDelay ( row . getDuration ( "ZLEVELINGDELAY_" ) ) ; if ( assignment . getRemainingWork ( ) == null ) { assignment . setRemainingWork ( assignment . getWork ( ) ) ; } if ( resource . getType ( ) == ResourceType . WORK ) { assignment . setUnits ( Double . valueOf ( NumberHelper . getDouble ( row . getDouble ( "ZRESOURCEUNITS_" ) ) * 100.0 ) ) ; } } } }
Read assignment data .
512
4
143,706
private Duration assignmentDuration ( Task task , Duration work ) { Duration result = work ; if ( result != null ) { if ( result . getUnits ( ) == TimeUnit . PERCENT ) { Duration taskWork = task . getWork ( ) ; if ( taskWork != null ) { result = Duration . getInstance ( taskWork . getDuration ( ) * result . getDuration ( ) , taskWork . getUnits ( ) ) ; } } } return result ; }
Extract a duration amount from the assignment converting a percentage into an actual duration .
99
16
143,707
private void processDependencies ( ) throws SQLException { List < Row > rows = getRows ( "select * from zdependency where zproject=?" , m_projectID ) ; for ( Row row : rows ) { Task nextTask = m_project . getTaskByUniqueID ( row . getInteger ( "ZNEXTACTIVITY_" ) ) ; Task prevTask = m_project . getTaskByUniqueID ( row . getInteger ( "ZPREVIOUSACTIVITY_" ) ) ; Duration lag = row . getDuration ( "ZLAG_" ) ; RelationType type = row . getRelationType ( "ZTYPE" ) ; Relation relation = nextTask . addPredecessor ( prevTask , type , lag ) ; relation . setUniqueID ( row . getInteger ( "Z_PK" ) ) ; } }
Read relation data .
189
4
143,708
private NodeList getNodeList ( String document , XPathExpression expression ) throws Exception { Document doc = m_documentBuilder . parse ( new InputSource ( new StringReader ( document ) ) ) ; return ( NodeList ) expression . evaluate ( doc , XPathConstants . NODESET ) ; }
Retrieve a node list based on an XPath expression .
65
12
143,709
public void process ( ProjectProperties properties , FilterContainer filters , FixedData fixedData , Var2Data varData ) { int filterCount = fixedData . getItemCount ( ) ; boolean [ ] criteriaType = new boolean [ 2 ] ; CriteriaReader criteriaReader = getCriteriaReader ( ) ; for ( int filterLoop = 0 ; filterLoop < filterCount ; filterLoop ++ ) { byte [ ] filterFixedData = fixedData . getByteArrayValue ( filterLoop ) ; if ( filterFixedData == null || filterFixedData . length < 4 ) { continue ; } Filter filter = new Filter ( ) ; filter . setID ( Integer . valueOf ( MPPUtility . getInt ( filterFixedData , 0 ) ) ) ; filter . setName ( MPPUtility . removeAmpersands ( MPPUtility . getUnicodeString ( filterFixedData , 4 ) ) ) ; byte [ ] filterVarData = varData . getByteArray ( filter . getID ( ) , getVarDataType ( ) ) ; if ( filterVarData == null ) { continue ; } //System.out.println(ByteArrayHelper.hexdump(filterVarData, true, 16, "")); List < GenericCriteriaPrompt > prompts = new LinkedList < GenericCriteriaPrompt > ( ) ; filter . setShowRelatedSummaryRows ( MPPUtility . getByte ( filterVarData , 4 ) != 0 ) ; filter . setCriteria ( criteriaReader . process ( properties , filterVarData , 0 , - 1 , prompts , null , criteriaType ) ) ; filter . setIsTaskFilter ( criteriaType [ 0 ] ) ; filter . setIsResourceFilter ( criteriaType [ 1 ] ) ; filter . setPrompts ( prompts ) ; filters . addFilter ( filter ) ; //System.out.println(filter); } }
Entry point for processing filter definitions .
398
7
143,710
@ SuppressWarnings ( "unchecked" ) public static TimeUnit getInstance ( String units , Locale locale ) throws MPXJException { Map < String , Integer > map = LocaleData . getMap ( locale , LocaleData . TIME_UNITS_MAP ) ; Integer result = map . get ( units . toLowerCase ( ) ) ; if ( result == null ) { throw new MPXJException ( MPXJException . INVALID_TIME_UNIT + " " + units ) ; } return ( TimeUnit . getInstance ( result . intValue ( ) ) ) ; }
This method is used to parse a string representation of a time unit and return the appropriate constant value .
130
20
143,711
public BlockHeader read ( byte [ ] buffer , int offset , int postHeaderSkipBytes ) { m_offset = offset ; System . arraycopy ( buffer , m_offset , m_header , 0 , 8 ) ; m_offset += 8 ; int nameLength = FastTrackUtility . getInt ( buffer , m_offset ) ; m_offset += 4 ; if ( nameLength < 1 || nameLength > 255 ) { throw new UnexpectedStructureException ( ) ; } m_name = new String ( buffer , m_offset , nameLength , CharsetHelper . UTF16LE ) ; m_offset += nameLength ; m_columnType = FastTrackUtility . getShort ( buffer , m_offset ) ; m_offset += 2 ; m_flags = FastTrackUtility . getShort ( buffer , m_offset ) ; m_offset += 2 ; m_skip = new byte [ postHeaderSkipBytes ] ; System . arraycopy ( buffer , m_offset , m_skip , 0 , postHeaderSkipBytes ) ; m_offset += postHeaderSkipBytes ; return this ; }
Reads the header data from a block .
234
9
143,712
public void process ( ProjectFile file , Var2Data varData , byte [ ] fixedData ) throws IOException { Props props = getProps ( varData ) ; //System.out.println(props); if ( props != null ) { String viewName = MPPUtility . removeAmpersands ( props . getUnicodeString ( VIEW_NAME ) ) ; byte [ ] listData = props . getByteArray ( VIEW_CONTENTS ) ; List < Integer > uniqueIdList = new LinkedList < Integer > ( ) ; if ( listData != null ) { for ( int index = 0 ; index < listData . length ; index += 4 ) { Integer uniqueID = Integer . valueOf ( MPPUtility . getInt ( listData , index ) ) ; // // Ensure that we have a valid task, and that if we have and // ID of zero, this is the first task shown. // if ( file . getTaskByUniqueID ( uniqueID ) != null && ( uniqueID . intValue ( ) != 0 || index == 0 ) ) { uniqueIdList . add ( uniqueID ) ; } } } int filterID = MPPUtility . getShort ( fixedData , 128 ) ; ViewState state = new ViewState ( file , viewName , uniqueIdList , filterID ) ; file . getViews ( ) . setViewState ( state ) ; } }
Entry point for processing saved view state .
300
8
143,713
@ SuppressWarnings ( "unchecked" ) public final List < MapRow > getRows ( String name ) { return ( List < MapRow > ) getObject ( name ) ; }
Retrieve row from a nested table .
42
8
143,714
public static String strip ( String text ) { String result = text ; if ( text != null && ! text . isEmpty ( ) ) { try { boolean formalRTF = isFormalRTF ( text ) ; StringTextConverter stc = new StringTextConverter ( ) ; stc . convert ( new RtfStringSource ( text ) ) ; result = stripExtraLineEnd ( stc . getText ( ) , formalRTF ) ; } catch ( IOException ex ) { result = "" ; } } return result ; }
This method removes all RTF formatting from a given piece of text .
115
14
143,715
private static String stripExtraLineEnd ( String text , boolean formalRTF ) { if ( formalRTF && text . endsWith ( "\n" ) ) { text = text . substring ( 0 , text . length ( ) - 1 ) ; } return text ; }
Remove the trailing line end from an RTF block .
57
11
143,716
private void processWorkWeeks ( byte [ ] data , int offset , ProjectCalendar cal ) { // System.out.println("Calendar=" + cal.getName()); // System.out.println("Work week block start offset=" + offset); // System.out.println(ByteArrayHelper.hexdump(data, true, 16, "")); // skip 4 byte header offset += 4 ; while ( data . length >= offset + ( ( 7 * 60 ) + 2 + 2 + 8 + 4 ) ) { //System.out.println("Week start offset=" + offset); ProjectCalendarWeek week = cal . addWorkWeek ( ) ; for ( Day day : Day . values ( ) ) { // 60 byte block per day processWorkWeekDay ( data , offset , week , day ) ; offset += 60 ; } Date startDate = DateHelper . getDayStartDate ( MPPUtility . getDate ( data , offset ) ) ; offset += 2 ; Date finishDate = DateHelper . getDayEndDate ( MPPUtility . getDate ( data , offset ) ) ; offset += 2 ; // skip unknown 8 bytes //System.out.println(ByteArrayHelper.hexdump(data, offset, 8, false)); offset += 8 ; // // Extract the name length - ensure that it is aligned to a 4 byte boundary // int nameLength = MPPUtility . getInt ( data , offset ) ; if ( nameLength % 4 != 0 ) { nameLength = ( ( nameLength / 4 ) + 1 ) * 4 ; } offset += 4 ; if ( nameLength != 0 ) { String name = MPPUtility . getUnicodeString ( data , offset , nameLength ) ; offset += nameLength ; week . setName ( name ) ; } week . setDateRange ( new DateRange ( startDate , finishDate ) ) ; // System.out.println(week); } }
Read the work weeks .
407
5
143,717
private void processWorkWeekDay ( byte [ ] data , int offset , ProjectCalendarWeek week , Day day ) { //System.out.println(ByteArrayHelper.hexdump(data, offset, 60, false)); int dayType = MPPUtility . getShort ( data , offset + 0 ) ; if ( dayType == 1 ) { week . setWorkingDay ( day , DayType . DEFAULT ) ; } else { ProjectCalendarHours hours = week . addCalendarHours ( day ) ; int rangeCount = MPPUtility . getShort ( data , offset + 2 ) ; if ( rangeCount == 0 ) { week . setWorkingDay ( day , DayType . NON_WORKING ) ; } else { week . setWorkingDay ( day , DayType . WORKING ) ; Calendar cal = DateHelper . popCalendar ( ) ; for ( int index = 0 ; index < rangeCount ; index ++ ) { Date startTime = DateHelper . getCanonicalTime ( MPPUtility . getTime ( data , offset + 8 + ( index * 2 ) ) ) ; int durationInSeconds = MPPUtility . getInt ( data , offset + 20 + ( index * 4 ) ) * 6 ; cal . setTime ( startTime ) ; cal . add ( Calendar . SECOND , durationInSeconds ) ; Date finishTime = DateHelper . getCanonicalTime ( cal . getTime ( ) ) ; hours . addRange ( new DateRange ( startTime , finishTime ) ) ; } DateHelper . pushCalendar ( cal ) ; } } }
Process an individual work week day .
339
7
143,718
private RecurrenceType getRecurrenceType ( int value ) { RecurrenceType result ; if ( value < 0 || value >= RECURRENCE_TYPES . length ) { result = null ; } else { result = RECURRENCE_TYPES [ value ] ; } return result ; }
Retrieve the recurrence type .
62
7
143,719
private boolean getRelative ( int value ) { boolean result ; if ( value < 0 || value >= RELATIVE_MAP . length ) { result = false ; } else { result = RELATIVE_MAP [ value ] ; } return result ; }
Determine if the exception is relative based on the recurrence type integer value .
51
17
143,720
public List < GanttDesignerRemark . Task > getTask ( ) { if ( task == null ) { task = new ArrayList < GanttDesignerRemark . Task > ( ) ; } return this . task ; }
Gets the value of the task property .
51
9
143,721
public void setLeftTableModel ( TableModel model ) { TableModel old = m_leftTable . getModel ( ) ; m_leftTable . setModel ( model ) ; firePropertyChange ( "leftTableModel" , old , model ) ; }
Set the model used by the left table .
53
9
143,722
public void setRightTableModel ( TableModel model ) { TableModel old = m_rightTable . getModel ( ) ; m_rightTable . setModel ( model ) ; firePropertyChange ( "rightTableModel" , old , model ) ; }
Set the model used by the right table .
53
9
143,723
public void process ( InputStream is ) throws Exception { readHeader ( is ) ; readVersion ( is ) ; readTableData ( readTableHeaders ( is ) , is ) ; }
Extract raw table data from the input stream .
39
10
143,724
public StreamReader getTableData ( String name ) throws IOException { InputStream stream = new ByteArrayInputStream ( m_tableData . get ( name ) ) ; if ( m_majorVersion > 5 ) { byte [ ] header = new byte [ 24 ] ; stream . read ( header ) ; SynchroLogger . log ( "TABLE HEADER" , header ) ; } return new StreamReader ( m_majorVersion , stream ) ; }
Return an input stream to read the data from the named table .
95
13
143,725
private List < SynchroTable > readTableHeaders ( InputStream is ) throws IOException { // Read the headers List < SynchroTable > tables = new ArrayList < SynchroTable > ( ) ; byte [ ] header = new byte [ 48 ] ; while ( true ) { is . read ( header ) ; m_offset += 48 ; SynchroTable table = readTableHeader ( header ) ; if ( table == null ) { break ; } tables . add ( table ) ; } // Ensure sorted by offset Collections . sort ( tables , new Comparator < SynchroTable > ( ) { @ Override public int compare ( SynchroTable o1 , SynchroTable o2 ) { return o1 . getOffset ( ) - o2 . getOffset ( ) ; } } ) ; // Calculate lengths SynchroTable previousTable = null ; for ( SynchroTable table : tables ) { if ( previousTable != null ) { previousTable . setLength ( table . getOffset ( ) - previousTable . getOffset ( ) ) ; } previousTable = table ; } for ( SynchroTable table : tables ) { SynchroLogger . log ( "TABLE" , table ) ; } return tables ; }
Read the table headers . This allows us to break the file into chunks representing the individual tables .
264
19
143,726
private SynchroTable readTableHeader ( byte [ ] header ) { SynchroTable result = null ; String tableName = DatatypeConverter . getSimpleString ( header , 0 ) ; if ( ! tableName . isEmpty ( ) ) { int offset = DatatypeConverter . getInt ( header , 40 ) ; result = new SynchroTable ( tableName , offset ) ; } return result ; }
Read the header data for a single file .
92
9
143,727
private void readTableData ( List < SynchroTable > tables , InputStream is ) throws IOException { for ( SynchroTable table : tables ) { if ( REQUIRED_TABLES . contains ( table . getName ( ) ) ) { readTable ( is , table ) ; } } }
Read the data for all of the tables we re interested in .
66
13
143,728
private void readTable ( InputStream is , SynchroTable table ) throws IOException { int skip = table . getOffset ( ) - m_offset ; if ( skip != 0 ) { StreamHelper . skip ( is , skip ) ; m_offset += skip ; } String tableName = DatatypeConverter . getString ( is ) ; int tableNameLength = 2 + tableName . length ( ) ; m_offset += tableNameLength ; int dataLength ; if ( table . getLength ( ) == - 1 ) { dataLength = is . available ( ) ; } else { dataLength = table . getLength ( ) - tableNameLength ; } SynchroLogger . log ( "READ" , tableName ) ; byte [ ] compressedTableData = new byte [ dataLength ] ; is . read ( compressedTableData ) ; m_offset += dataLength ; Inflater inflater = new Inflater ( ) ; inflater . setInput ( compressedTableData ) ; ByteArrayOutputStream outputStream = new ByteArrayOutputStream ( compressedTableData . length ) ; byte [ ] buffer = new byte [ 1024 ] ; while ( ! inflater . finished ( ) ) { int count ; try { count = inflater . inflate ( buffer ) ; } catch ( DataFormatException ex ) { throw new IOException ( ex ) ; } outputStream . write ( buffer , 0 , count ) ; } outputStream . close ( ) ; byte [ ] uncompressedTableData = outputStream . toByteArray ( ) ; SynchroLogger . log ( uncompressedTableData ) ; m_tableData . put ( table . getName ( ) , uncompressedTableData ) ; }
Read data for a single table and store it .
358
10
143,729
private void readHeader ( InputStream is ) throws IOException { byte [ ] header = new byte [ 20 ] ; is . read ( header ) ; m_offset += 20 ; SynchroLogger . log ( "HEADER" , header ) ; }
Read the file header data .
54
6
143,730
private void readVersion ( InputStream is ) throws IOException { BytesReadInputStream bytesReadStream = new BytesReadInputStream ( is ) ; String version = DatatypeConverter . getString ( bytesReadStream ) ; m_offset += bytesReadStream . getBytesRead ( ) ; SynchroLogger . log ( "VERSION" , version ) ; String [ ] versionArray = version . split ( "\\." ) ; m_majorVersion = Integer . parseInt ( versionArray [ 0 ] ) ; }
Read the version number .
112
5
143,731
public List < ProjectFile > readAll ( ) throws MPXJException { Map < Integer , String > projects = listProjects ( ) ; List < ProjectFile > result = new ArrayList < ProjectFile > ( projects . keySet ( ) . size ( ) ) ; for ( Integer id : projects . keySet ( ) ) { setProjectID ( id . intValue ( ) ) ; result . add ( read ( ) ) ; } return result ; }
Convenience method which allows all projects in the database to be read in a single operation .
96
19
143,732
private void processAnalytics ( ) throws SQLException { allocateConnection ( ) ; try { DatabaseMetaData meta = m_connection . getMetaData ( ) ; String productName = meta . getDatabaseProductName ( ) ; if ( productName == null || productName . isEmpty ( ) ) { productName = "DATABASE" ; } else { productName = productName . toUpperCase ( ) ; } ProjectProperties properties = m_reader . getProject ( ) . getProjectProperties ( ) ; properties . setFileApplication ( "Primavera" ) ; properties . setFileType ( productName ) ; } finally { releaseConnection ( ) ; } }
Populate data for analytics .
145
6
143,733
private void processSchedulingProjectProperties ( ) throws SQLException { List < Row > rows = getRows ( "select * from " + m_schema + "projprop where proj_id=? and prop_name='scheduling'" , m_projectID ) ; if ( ! rows . isEmpty ( ) ) { Row row = rows . get ( 0 ) ; Record record = Record . getRecord ( row . getString ( "prop_value" ) ) ; if ( record != null ) { String [ ] keyValues = record . getValue ( ) . split ( "\\|" ) ; for ( int i = 0 ; i < keyValues . length - 1 ; ++ i ) { if ( "sched_calendar_on_relationship_lag" . equals ( keyValues [ i ] ) ) { Map < String , Object > customProperties = new HashMap < String , Object > ( ) ; customProperties . put ( "LagCalendar" , keyValues [ i + 1 ] ) ; m_reader . getProject ( ) . getProjectProperties ( ) . setCustomProperties ( customProperties ) ; break ; } } } } }
Process the scheduling project property from PROJPROP . This table only seems to exist in P6 databases not XER files .
258
26
143,734
private void processDefaultCurrency ( Integer currencyID ) throws SQLException { List < Row > rows = getRows ( "select * from " + m_schema + "currtype where curr_id=?" , currencyID ) ; if ( ! rows . isEmpty ( ) ) { Row row = rows . get ( 0 ) ; m_reader . processDefaultCurrency ( row ) ; } }
Select the default currency properties from the database .
89
9
143,735
public void setSchema ( String schema ) { if ( schema == null ) { schema = "" ; } else { if ( ! schema . isEmpty ( ) && ! schema . endsWith ( "." ) ) { schema = schema + ' ' ; } } m_schema = schema ; }
Set the name of the schema containing the Primavera tables .
62
14
143,736
public void applyPattern ( String primaryPattern , String [ ] alternativePatterns , char decimalSeparator , char groupingSeparator ) { m_symbols . setDecimalSeparator ( decimalSeparator ) ; m_symbols . setGroupingSeparator ( groupingSeparator ) ; setDecimalFormatSymbols ( m_symbols ) ; applyPattern ( primaryPattern ) ; if ( alternativePatterns != null && alternativePatterns . length != 0 ) { int loop ; if ( m_alternativeFormats == null || m_alternativeFormats . length != alternativePatterns . length ) { m_alternativeFormats = new DecimalFormat [ alternativePatterns . length ] ; for ( loop = 0 ; loop < alternativePatterns . length ; loop ++ ) { m_alternativeFormats [ loop ] = new DecimalFormat ( ) ; } } for ( loop = 0 ; loop < alternativePatterns . length ; loop ++ ) { m_alternativeFormats [ loop ] . setDecimalFormatSymbols ( m_symbols ) ; m_alternativeFormats [ loop ] . applyPattern ( alternativePatterns [ loop ] ) ; } } }
This method is used to configure the primary and alternative format patterns .
259
13
143,737
private String getResourceField ( int key ) { String result = null ; if ( key > 0 && key < m_resourceNames . length ) { result = m_resourceNames [ key ] ; } return ( result ) ; }
Given a resource field number this method returns the resource field name .
48
13
143,738
private int getResourceCode ( String field ) throws MPXJException { Integer result = m_resourceNumbers . get ( field ) ; if ( result == null ) { throw new MPXJException ( MPXJException . INVALID_RESOURCE_FIELD_NAME + " " + field ) ; } return ( result . intValue ( ) ) ; }
Given a resource field name this method returns the resource field number .
76
13
143,739
public FieldType getField ( ) { FieldType result = null ; if ( m_index < m_fields . length ) { result = m_fields [ m_index ++ ] ; } return result ; }
Retrieve the next available field .
44
7
143,740
public List < MapRow > readTable ( TableReader reader ) throws IOException { reader . read ( ) ; return reader . getRows ( ) ; }
Read a nested table . Instantiates the supplied reader class to extract the data .
33
17
143,741
public List < MapRow > readUnknownTable ( int rowSize , int rowMagicNumber ) throws IOException { TableReader reader = new UnknownTableReader ( this , rowSize , rowMagicNumber ) ; reader . read ( ) ; return reader . getRows ( ) ; }
Read a nested table whose contents we don t understand .
58
11
143,742
public List < MapRow > readTable ( Class < ? extends TableReader > readerClass ) throws IOException { TableReader reader ; try { reader = readerClass . getConstructor ( StreamReader . class ) . newInstance ( this ) ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } return readTable ( reader ) ; }
Reads a nested table . Uses the supplied reader class instance .
74
13
143,743
public List < MapRow > readTableConditional ( Class < ? extends TableReader > readerClass ) throws IOException { List < MapRow > result ; if ( DatatypeConverter . getBoolean ( m_stream ) ) { result = readTable ( readerClass ) ; } else { result = Collections . emptyList ( ) ; } return result ; }
Conditionally read a nested table based in the value of a boolean flag which precedes the table data .
77
21
143,744
public ByteArray readBytes ( int size ) throws IOException { byte [ ] data = new byte [ size ] ; m_stream . read ( data ) ; return new ByteArray ( data ) ; }
Read an array of bytes of a specified size .
42
10
143,745
public List < MapRow > readBlocks ( Class < ? extends BlockReader > readerClass ) throws IOException { BlockReader reader ; try { reader = readerClass . getConstructor ( StreamReader . class ) . newInstance ( this ) ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } return reader . read ( ) ; }
Read a list of fixed size blocks using an instance of the supplied reader class .
74
16
143,746
private void readFile ( InputStream is ) throws IOException { StreamHelper . skip ( is , 64 ) ; int index = 64 ; ArrayList < Integer > offsetList = new ArrayList < Integer > ( ) ; List < String > nameList = new ArrayList < String > ( ) ; while ( true ) { byte [ ] table = new byte [ 32 ] ; is . read ( table ) ; index += 32 ; int offset = PEPUtility . getInt ( table , 0 ) ; offsetList . add ( Integer . valueOf ( offset ) ) ; if ( offset == 0 ) { break ; } nameList . add ( PEPUtility . getString ( table , 5 ) . toUpperCase ( ) ) ; } StreamHelper . skip ( is , offsetList . get ( 0 ) . intValue ( ) - index ) ; for ( int offsetIndex = 1 ; offsetIndex < offsetList . size ( ) - 1 ; offsetIndex ++ ) { String name = nameList . get ( offsetIndex - 1 ) ; Class < ? extends Table > tableClass = TABLE_CLASSES . get ( name ) ; if ( tableClass == null ) { tableClass = Table . class ; } Table table ; try { table = tableClass . newInstance ( ) ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } m_tables . put ( name , table ) ; table . read ( is ) ; } }
Reads a PEP file from the input stream .
304
11
143,747
private void readCalendars ( ) { // // Create the calendars // for ( MapRow row : getTable ( "NCALTAB" ) ) { ProjectCalendar calendar = m_projectFile . addCalendar ( ) ; calendar . setUniqueID ( row . getInteger ( "UNIQUE_ID" ) ) ; calendar . setName ( row . getString ( "NAME" ) ) ; calendar . setWorkingDay ( Day . SUNDAY , row . getBoolean ( "SUNDAY" ) ) ; calendar . setWorkingDay ( Day . MONDAY , row . getBoolean ( "MONDAY" ) ) ; calendar . setWorkingDay ( Day . TUESDAY , row . getBoolean ( "TUESDAY" ) ) ; calendar . setWorkingDay ( Day . WEDNESDAY , row . getBoolean ( "WEDNESDAY" ) ) ; calendar . setWorkingDay ( Day . THURSDAY , row . getBoolean ( "THURSDAY" ) ) ; calendar . setWorkingDay ( Day . FRIDAY , row . getBoolean ( "FRIDAY" ) ) ; calendar . setWorkingDay ( Day . SATURDAY , row . getBoolean ( "SATURDAY" ) ) ; for ( Day day : Day . values ( ) ) { if ( calendar . isWorkingDay ( day ) ) { // TODO: this is an approximation calendar . addDefaultCalendarHours ( day ) ; } } } // // Set up the hierarchy and add exceptions // Table exceptionsTable = getTable ( "CALXTAB" ) ; for ( MapRow row : getTable ( "NCALTAB" ) ) { ProjectCalendar child = m_projectFile . getCalendarByUniqueID ( row . getInteger ( "UNIQUE_ID" ) ) ; ProjectCalendar parent = m_projectFile . getCalendarByUniqueID ( row . getInteger ( "BASE_CALENDAR_ID" ) ) ; if ( child != null && parent != null ) { child . setParent ( parent ) ; } addCalendarExceptions ( exceptionsTable , child , row . getInteger ( "FIRST_CALENDAR_EXCEPTION_ID" ) ) ; m_eventManager . fireCalendarReadEvent ( child ) ; } }
Read calendar data from a PEP file .
499
9
143,748
private void addCalendarExceptions ( Table table , ProjectCalendar calendar , Integer exceptionID ) { Integer currentExceptionID = exceptionID ; while ( true ) { MapRow row = table . find ( currentExceptionID ) ; if ( row == null ) { break ; } Date date = row . getDate ( "DATE" ) ; ProjectCalendarException exception = calendar . addCalendarException ( date , date ) ; if ( row . getBoolean ( "WORKING" ) ) { exception . addRange ( ProjectCalendarWeek . DEFAULT_WORKING_MORNING ) ; exception . addRange ( ProjectCalendarWeek . DEFAULT_WORKING_AFTERNOON ) ; } currentExceptionID = row . getInteger ( "NEXT_CALENDAR_EXCEPTION_ID" ) ; } }
Read exceptions for a calendar .
176
6
143,749
private void readResources ( ) { for ( MapRow row : getTable ( "RTAB" ) ) { Resource resource = m_projectFile . addResource ( ) ; setFields ( RESOURCE_FIELDS , row , resource ) ; m_eventManager . fireResourceReadEvent ( resource ) ; // TODO: Correctly handle calendar } }
Read resource data from a PEP file .
76
9
143,750
private void readTasks ( ) { Integer rootID = Integer . valueOf ( 1 ) ; readWBS ( m_projectFile , rootID ) ; readTasks ( rootID ) ; m_projectFile . getTasks ( ) . synchronizeTaskIDToHierarchy ( ) ; }
Read task data from a PEP file .
64
9
143,751
private void readWBS ( ChildTaskContainer parent , Integer id ) { Integer currentID = id ; Table table = getTable ( "WBSTAB" ) ; while ( currentID . intValue ( ) != 0 ) { MapRow row = table . find ( currentID ) ; Integer taskID = row . getInteger ( "TASK_ID" ) ; Task task = readTask ( parent , taskID ) ; Integer childID = row . getInteger ( "CHILD_ID" ) ; if ( childID . intValue ( ) != 0 ) { readWBS ( task , childID ) ; } currentID = row . getInteger ( "NEXT_ID" ) ; } }
Recursively read the WBS structure from a PEP file .
148
14
143,752
private void readTasks ( Integer id ) { Integer currentID = id ; Table table = getTable ( "WBSTAB" ) ; while ( currentID . intValue ( ) != 0 ) { MapRow row = table . find ( currentID ) ; Task task = m_projectFile . getTaskByUniqueID ( row . getInteger ( "TASK_ID" ) ) ; readLeafTasks ( task , row . getInteger ( "FIRST_CHILD_TASK_ID" ) ) ; Integer childID = row . getInteger ( "CHILD_ID" ) ; if ( childID . intValue ( ) != 0 ) { readTasks ( childID ) ; } currentID = row . getInteger ( "NEXT_ID" ) ; } }
Read leaf tasks attached to the WBS .
169
9
143,753
private void readLeafTasks ( Task parent , Integer id ) { Integer currentID = id ; Table table = getTable ( "A1TAB" ) ; while ( currentID . intValue ( ) != 0 ) { if ( m_projectFile . getTaskByUniqueID ( currentID ) == null ) { readTask ( parent , currentID ) ; } currentID = table . find ( currentID ) . getInteger ( "NEXT_TASK_ID" ) ; } }
Read the leaf tasks for an individual WBS node .
106
11
143,754
private Task readTask ( ChildTaskContainer parent , Integer id ) { Table a0 = getTable ( "A0TAB" ) ; Table a1 = getTable ( "A1TAB" ) ; Table a2 = getTable ( "A2TAB" ) ; Table a3 = getTable ( "A3TAB" ) ; Table a4 = getTable ( "A4TAB" ) ; Task task = parent . addTask ( ) ; MapRow a1Row = a1 . find ( id ) ; MapRow a2Row = a2 . find ( id ) ; setFields ( A0TAB_FIELDS , a0 . find ( id ) , task ) ; setFields ( A1TAB_FIELDS , a1Row , task ) ; setFields ( A2TAB_FIELDS , a2Row , task ) ; setFields ( A3TAB_FIELDS , a3 . find ( id ) , task ) ; setFields ( A5TAB_FIELDS , a4 . find ( id ) , task ) ; task . setStart ( task . getEarlyStart ( ) ) ; task . setFinish ( task . getEarlyFinish ( ) ) ; if ( task . getName ( ) == null ) { task . setName ( task . getText ( 1 ) ) ; } m_eventManager . fireTaskReadEvent ( task ) ; return task ; }
Read data for an individual task from the tables in a PEP file .
311
15
143,755
private void readRelationships ( ) { for ( MapRow row : getTable ( "CONTAB" ) ) { Task task1 = m_projectFile . getTaskByUniqueID ( row . getInteger ( "TASK_ID_1" ) ) ; Task task2 = m_projectFile . getTaskByUniqueID ( row . getInteger ( "TASK_ID_2" ) ) ; if ( task1 != null && task2 != null ) { RelationType type = row . getRelationType ( "TYPE" ) ; Duration lag = row . getDuration ( "LAG" ) ; Relation relation = task2 . addPredecessor ( task1 , type , lag ) ; m_eventManager . fireRelationReadEvent ( relation ) ; } } }
Read relationship data from a PEP file .
170
9
143,756
private void readResourceAssignments ( ) { for ( MapRow row : getTable ( "USGTAB" ) ) { Task task = m_projectFile . getTaskByUniqueID ( row . getInteger ( "TASK_ID" ) ) ; Resource resource = m_projectFile . getResourceByUniqueID ( row . getInteger ( "RESOURCE_ID" ) ) ; if ( task != null && resource != null ) { ResourceAssignment assignment = task . addResourceAssignment ( resource ) ; m_eventManager . fireAssignmentReadEvent ( assignment ) ; } } }
Read resource assignment data from a PEP file .
128
10
143,757
private Table getTable ( String name ) { Table table = m_tables . get ( name ) ; if ( table == null ) { table = EMPTY_TABLE ; } return table ; }
Retrieve a table by name .
41
7
143,758
private void applyAliases ( ) { CustomFieldContainer fields = m_projectFile . getCustomFields ( ) ; for ( Map . Entry < FieldType , String > entry : ALIASES . entrySet ( ) ) { fields . getCustomField ( entry . getKey ( ) ) . setAlias ( entry . getValue ( ) ) ; } }
Configure column aliases .
75
5
143,759
private static void defineField ( Map < String , FieldType > container , String name , FieldType type , String alias ) { container . put ( name , type ) ; if ( alias != null ) { ALIASES . put ( type , alias ) ; } }
Configure the mapping between a database column and a field including definition of an alias .
55
17
143,760
public String getString ( Integer type ) { String result = null ; byte [ ] item = m_map . get ( type ) ; if ( item != null ) { result = m_data . getString ( getOffset ( item ) ) ; } return ( result ) ; }
Retrieves a string value from the extended data .
58
11
143,761
public int getInt ( Integer type ) { int result = 0 ; byte [ ] item = m_map . get ( type ) ; if ( item != null ) { result = MPPUtility . getInt ( item , 0 ) ; } return ( result ) ; }
Retrieves an integer value from the extended data .
57
11
143,762
public long getLong ( Integer type ) { long result = 0 ; byte [ ] item = m_map . get ( type ) ; if ( item != null ) { result = MPPUtility . getLong6 ( item , 0 ) ; } return ( result ) ; }
Retrieves a long value from the extended data .
58
11
143,763
private void processHyperlinkData ( ResourceAssignment assignment , byte [ ] data ) { if ( data != null ) { int offset = 12 ; offset += 12 ; String hyperlink = MPPUtility . getUnicodeString ( data , offset ) ; offset += ( ( hyperlink . length ( ) + 1 ) * 2 ) ; offset += 12 ; String address = MPPUtility . getUnicodeString ( data , offset ) ; offset += ( ( address . length ( ) + 1 ) * 2 ) ; offset += 12 ; String subaddress = MPPUtility . getUnicodeString ( data , offset ) ; offset += ( ( subaddress . length ( ) + 1 ) * 2 ) ; offset += 12 ; String screentip = MPPUtility . getUnicodeString ( data , offset ) ; assignment . setHyperlink ( hyperlink ) ; assignment . setHyperlinkAddress ( address ) ; assignment . setHyperlinkSubAddress ( subaddress ) ; assignment . setHyperlinkScreenTip ( screentip ) ; } }
Extract assignment hyperlink data .
225
7
143,764
private void createTimephasedData ( ProjectFile file , ResourceAssignment assignment , List < TimephasedWork > timephasedPlanned , List < TimephasedWork > timephasedComplete ) { if ( timephasedPlanned . isEmpty ( ) && timephasedComplete . isEmpty ( ) ) { Duration totalMinutes = assignment . getWork ( ) . convertUnits ( TimeUnit . MINUTES , file . getProjectProperties ( ) ) ; Duration workPerDay ; if ( assignment . getResource ( ) == null || assignment . getResource ( ) . getType ( ) == ResourceType . WORK ) { workPerDay = totalMinutes . getDuration ( ) == 0 ? totalMinutes : ResourceAssignmentFactory . DEFAULT_NORMALIZER_WORK_PER_DAY ; int units = NumberHelper . getInt ( assignment . getUnits ( ) ) ; if ( units != 100 ) { workPerDay = Duration . getInstance ( ( workPerDay . getDuration ( ) * units ) / 100.0 , workPerDay . getUnits ( ) ) ; } } else { if ( assignment . getVariableRateUnits ( ) == null ) { Duration workingDays = assignment . getCalendar ( ) . getWork ( assignment . getStart ( ) , assignment . getFinish ( ) , TimeUnit . DAYS ) ; double units = NumberHelper . getDouble ( assignment . getUnits ( ) ) ; double unitsPerDayAsMinutes = ( units * 60 ) / ( workingDays . getDuration ( ) * 100 ) ; workPerDay = Duration . getInstance ( unitsPerDayAsMinutes , TimeUnit . MINUTES ) ; } else { double unitsPerHour = NumberHelper . getDouble ( assignment . getUnits ( ) ) ; workPerDay = ResourceAssignmentFactory . DEFAULT_NORMALIZER_WORK_PER_DAY ; Duration hoursPerDay = workPerDay . convertUnits ( TimeUnit . HOURS , file . getProjectProperties ( ) ) ; double unitsPerDayAsHours = ( unitsPerHour * hoursPerDay . getDuration ( ) ) / 100 ; double unitsPerDayAsMinutes = unitsPerDayAsHours * 60 ; workPerDay = Duration . getInstance ( unitsPerDayAsMinutes , TimeUnit . MINUTES ) ; } } Duration overtimeWork = assignment . getOvertimeWork ( ) ; if ( overtimeWork != null && overtimeWork . getDuration ( ) != 0 ) { Duration totalOvertimeMinutes = overtimeWork . convertUnits ( TimeUnit . MINUTES , file . getProjectProperties ( ) ) ; totalMinutes = Duration . getInstance ( totalMinutes . getDuration ( ) - totalOvertimeMinutes . getDuration ( ) , TimeUnit . MINUTES ) ; } TimephasedWork tra = new TimephasedWork ( ) ; tra . setStart ( assignment . getStart ( ) ) ; tra . setAmountPerDay ( workPerDay ) ; tra . setModified ( false ) ; tra . setFinish ( assignment . getFinish ( ) ) ; tra . setTotalAmount ( totalMinutes ) ; timephasedPlanned . add ( tra ) ; } }
Method used to create missing timephased data .
686
10
143,765
public static final String printTimestamp ( Date value ) { return ( value == null ? null : TIMESTAMP_FORMAT . get ( ) . format ( value ) ) ; }
Print a timestamp value .
38
5
143,766
public static final Duration parseDuration ( String value ) { return value == null ? null : Duration . getInstance ( Double . parseDouble ( value ) , TimeUnit . DAYS ) ; }
Parse a duration value .
39
6
143,767
public static final String printDuration ( Duration value ) { return value == null ? null : Double . toString ( value . getDuration ( ) ) ; }
Print a duration value .
32
5
143,768
public static final String printDate ( Date value ) { return ( value == null ? null : DATE_FORMAT . get ( ) . format ( value ) ) ; }
Print a date .
36
4
143,769
public static final Double parsePercent ( String value ) { return value == null ? null : Double . valueOf ( Double . parseDouble ( value ) * 100.0 ) ; }
Parse a percent complete value .
37
7
143,770
public static final String printPercent ( Double value ) { return value == null ? null : Double . toString ( value . doubleValue ( ) / 100.0 ) ; }
Print a percent complete value .
36
6
143,771
public List < PPVItemsType . PPVItem > getPPVItem ( ) { if ( ppvItem == null ) { ppvItem = new ArrayList < PPVItemsType . PPVItem > ( ) ; } return this . ppvItem ; }
Gets the value of the ppvItem property .
57
11
143,772
public CustomField getCustomField ( FieldType field ) { CustomField result = m_configMap . get ( field ) ; if ( result == null ) { result = new CustomField ( field , this ) ; m_configMap . put ( field , result ) ; } return result ; }
Retrieve configuration details for a given custom field .
61
10
143,773
void registerAlias ( FieldType type , String alias ) { m_aliasMap . put ( new Pair < FieldTypeClass , String > ( type . getFieldTypeClass ( ) , alias ) , type ) ; }
When an alias for a field is added index it here to allow lookup by alias and type .
45
19
143,774
public FieldType getFieldByAlias ( FieldTypeClass typeClass , String alias ) { return m_aliasMap . get ( new Pair < FieldTypeClass , String > ( typeClass , alias ) ) ; }
Retrieve a field from a particular entity using its alias .
44
12
143,775
private void createFieldMap ( byte [ ] data ) { int index = 0 ; int lastDataBlockOffset = 0 ; int dataBlockIndex = 0 ; while ( index < data . length ) { long mask = MPPUtility . getInt ( data , index + 0 ) ; //mask = mask << 4; int dataBlockOffset = MPPUtility . getShort ( data , index + 4 ) ; //int metaFlags = MPPUtility.getByte(data, index + 8); FieldType type = getFieldType ( MPPUtility . getInt ( data , index + 12 ) ) ; int category = MPPUtility . getShort ( data , index + 20 ) ; //int sizeInBytes = MPPUtility.getShort(data, index + 22); //int metaIndex = MPPUtility.getInt(data, index + 24); // // Categories // // 02 - Short values [RATE_UNITS, WORKGROUP, ACCRUE, TIME_UNITS, PRIORITY, TASK_TYPE, CONSTRAINT, ACCRUE, PERCENTAGE, SHORT, WORK_UNITS] - BOOKING_TYPE, EARNED_VALUE_METHOD, DELIVERABLE_TYPE, RESOURCE_REQUEST_TYPE - we have as string in MPXJ???? // 03 - Int values [DURATION, INTEGER] - Recalc outline codes as Boolean? // 05 - Rate, Number [RATE, NUMERIC] // 08 - String (and some durations!!!) [STRING, DURATION] // 0B - Boolean (meta block 0?) - [BOOLEAN] // 13 - Date - [DATE] // 48 - GUID - [GUID] // 64 - Boolean (meta block 1?)- [BOOLEAN] // 65 - Work, Currency [WORK, CURRENCY] // 66 - Units [UNITS] // 1D - Raw bytes [BINARY, ASCII_STRING] - Exception: outline code indexes, they are integers, but stored as part of a binary block int varDataKey ; if ( useTypeAsVarDataKey ( ) ) { Integer substitute = substituteVarDataKey ( type ) ; if ( substitute == null ) { varDataKey = ( MPPUtility . getInt ( data , index + 12 ) & 0x0000FFFF ) ; } else { varDataKey = substitute . intValue ( ) ; } } else { varDataKey = MPPUtility . getByte ( data , index + 6 ) ; } FieldLocation location ; int metaBlock ; switch ( category ) { case 0x0B : { location = FieldLocation . META_DATA ; metaBlock = 0 ; break ; } case 0x64 : { location = FieldLocation . META_DATA ; metaBlock = 1 ; break ; } default : { metaBlock = 0 ; if ( dataBlockOffset != 65535 ) { location = FieldLocation . FIXED_DATA ; if ( dataBlockOffset < lastDataBlockOffset ) { ++ dataBlockIndex ; } lastDataBlockOffset = dataBlockOffset ; int typeSize = getFixedDataFieldSize ( type ) ; if ( dataBlockOffset + typeSize > m_maxFixedDataSize [ dataBlockIndex ] ) { m_maxFixedDataSize [ dataBlockIndex ] = dataBlockOffset + typeSize ; } } else { if ( varDataKey != 0 ) { location = FieldLocation . VAR_DATA ; } else { location = FieldLocation . UNKNOWN ; } } break ; } } FieldItem item = new FieldItem ( type , location , dataBlockIndex , dataBlockOffset , varDataKey , mask , metaBlock ) ; if ( m_debug ) { System . out . println ( ByteArrayHelper . hexdump ( data , index , 28 , false ) + " " + item + " mpxjDataType=" + item . getType ( ) . getDataType ( ) + " index=" + index ) ; } m_map . put ( type , item ) ; index += 28 ; } }
Generic method used to create a field map from a block of data .
866
14
143,776
public void createTaskFieldMap ( Props props ) { byte [ ] fieldMapData = null ; for ( Integer key : TASK_KEYS ) { fieldMapData = props . getByteArray ( key ) ; if ( fieldMapData != null ) { break ; } } if ( fieldMapData == null ) { populateDefaultData ( getDefaultTaskData ( ) ) ; } else { createFieldMap ( fieldMapData ) ; } }
Creates a field map for tasks .
95
8
143,777
public void createRelationFieldMap ( Props props ) { byte [ ] fieldMapData = null ; for ( Integer key : RELATION_KEYS ) { fieldMapData = props . getByteArray ( key ) ; if ( fieldMapData != null ) { break ; } } if ( fieldMapData == null ) { populateDefaultData ( getDefaultRelationData ( ) ) ; } else { createFieldMap ( fieldMapData ) ; } }
Creates a field map for relations .
96
8
143,778
public void createEnterpriseCustomFieldMap ( Props props , Class < ? > c ) { byte [ ] fieldMapData = null ; for ( Integer key : ENTERPRISE_CUSTOM_KEYS ) { fieldMapData = props . getByteArray ( key ) ; if ( fieldMapData != null ) { break ; } } if ( fieldMapData != null ) { int index = 4 ; while ( index < fieldMapData . length ) { //Looks like the custom fields have varying types, it may be that the last byte of the four represents the type? //System.out.println(ByteArrayHelper.hexdump(fieldMapData, index, 4, false)); int typeValue = MPPUtility . getInt ( fieldMapData , index ) ; FieldType type = getFieldType ( typeValue ) ; if ( type != null && type . getClass ( ) == c && type . toString ( ) . startsWith ( "Enterprise Custom Field" ) ) { int varDataKey = ( typeValue & 0xFFFF ) ; FieldItem item = new FieldItem ( type , FieldLocation . VAR_DATA , 0 , 0 , varDataKey , 0 , 0 ) ; m_map . put ( type , item ) ; //System.out.println(item); } //System.out.println((type == null ? "?" : type.getClass().getSimpleName() + "." + type) + " " + Integer.toHexString(typeValue)); index += 4 ; } } }
Create a field map for enterprise custom fields .
327
9
143,779
public void createResourceFieldMap ( Props props ) { byte [ ] fieldMapData = null ; for ( Integer key : RESOURCE_KEYS ) { fieldMapData = props . getByteArray ( key ) ; if ( fieldMapData != null ) { break ; } } if ( fieldMapData == null ) { populateDefaultData ( getDefaultResourceData ( ) ) ; } else { createFieldMap ( fieldMapData ) ; } }
Creates a field map for resources .
94
8
143,780
public void createAssignmentFieldMap ( Props props ) { //System.out.println("ASSIGN"); byte [ ] fieldMapData = null ; for ( Integer key : ASSIGNMENT_KEYS ) { fieldMapData = props . getByteArray ( key ) ; if ( fieldMapData != null ) { break ; } } if ( fieldMapData == null ) { populateDefaultData ( getDefaultAssignmentData ( ) ) ; } else { createFieldMap ( fieldMapData ) ; } }
Creates a field map for assignments .
107
8
143,781
private void populateDefaultData ( FieldItem [ ] defaultData ) { for ( FieldItem item : defaultData ) { m_map . put ( item . getType ( ) , item ) ; } }
This method takes an array of data and uses this to populate the field map .
42
16
143,782
public void populateContainer ( Class < ? extends FieldType > type , FieldContainer container , Integer id , byte [ ] [ ] fixedData , Var2Data varData ) { //System.out.println(container.getClass().getSimpleName()+": " + id); for ( FieldItem item : m_map . values ( ) ) { if ( item . getType ( ) . getClass ( ) . equals ( type ) ) { //System.out.println(item.m_type); Object value = item . read ( id , fixedData , varData ) ; //System.out.println(item.m_type.getClass().getSimpleName() + "." + item.m_type + ": " + value); container . set ( item . getType ( ) , value ) ; } } }
Given a container and a set of raw data blocks this method extracts the field data and writes it into the container .
175
23
143,783
public int getFixedDataOffset ( FieldType type ) { int result ; FieldItem item = m_map . get ( type ) ; if ( item != null ) { result = item . getFixedDataOffset ( ) ; } else { result = - 1 ; } return result ; }
Retrieve the fixed data offset for a specific field .
59
11
143,784
public Integer getVarDataKey ( FieldType type ) { Integer result = null ; FieldItem item = m_map . get ( type ) ; if ( item != null ) { result = item . getVarDataKey ( ) ; } return result ; }
Retrieve the var data key for a specific field .
53
11
143,785
public FieldType getFieldTypeFromVarDataKey ( Integer key ) { FieldType result = null ; for ( Entry < FieldType , FieldMap . FieldItem > entry : m_map . entrySet ( ) ) { if ( entry . getValue ( ) . getFieldLocation ( ) == FieldLocation . VAR_DATA && entry . getValue ( ) . getVarDataKey ( ) . equals ( key ) ) { result = entry . getKey ( ) ; break ; } } return result ; }
Used to map from a var data key to a field type . Note this is designed for diagnostic use only and uses an inefficient search .
106
27
143,786
public FieldLocation getFieldLocation ( FieldType type ) { FieldLocation result = null ; FieldItem item = m_map . get ( type ) ; if ( item != null ) { result = item . getFieldLocation ( ) ; } return result ; }
Retrieve the field location for a specific field .
53
10
143,787
protected Object getFieldData ( Integer id , FieldType type , byte [ ] [ ] fixedData , Var2Data varData ) { Object result = null ; FieldItem item = m_map . get ( type ) ; if ( item != null ) { result = item . read ( id , fixedData , varData ) ; } return result ; }
Retrieve a single field value .
73
7
143,788
public void dumpKnownFieldMaps ( Props props ) { //for (int key=131092; key < 131098; key++) for ( int key = 50331668 ; key < 50331674 ; key ++ ) { byte [ ] fieldMapData = props . getByteArray ( Integer . valueOf ( key ) ) ; if ( fieldMapData != null ) { System . out . println ( "KEY: " + key ) ; createFieldMap ( fieldMapData ) ; System . out . println ( toString ( ) ) ; clear ( ) ; } } }
Diagnostic method used to dump known field map data .
124
12
143,789
private int getFixedDataFieldSize ( FieldType type ) { int result = 0 ; DataType dataType = type . getDataType ( ) ; if ( dataType != null ) { switch ( dataType ) { case DATE : case INTEGER : case DURATION : { result = 4 ; break ; } case TIME_UNITS : case CONSTRAINT : case PRIORITY : case PERCENTAGE : case TASK_TYPE : case ACCRUE : case SHORT : case BOOLEAN : case DELAY : case WORKGROUP : case RATE_UNITS : case EARNED_VALUE_METHOD : case RESOURCE_REQUEST_TYPE : { result = 2 ; break ; } case CURRENCY : case UNITS : case RATE : case WORK : { result = 8 ; break ; } case WORK_UNITS : { result = 1 ; break ; } case GUID : { result = 16 ; break ; } default : { result = 0 ; break ; } } } return result ; }
Determine the size of a field in a fixed data block .
218
14
143,790
void setParent ( ProjectCalendarWeek parent ) { m_parent = parent ; for ( int loop = 0 ; loop < m_days . length ; loop ++ ) { if ( m_days [ loop ] == null ) { m_days [ loop ] = DayType . DEFAULT ; } } }
Set the parent from which this week is derived .
64
10
143,791
public ProjectCalendarHours getHours ( Day day ) { ProjectCalendarHours result = getCalendarHours ( day ) ; if ( result == null ) { // // If this is a base calendar and we have no hours, then we // have a problem - so we add the default hours and try again // if ( m_parent == null ) { // Only add default hours for the day that is 'missing' to avoid overwriting real calendar hours addDefaultCalendarHours ( day ) ; result = getCalendarHours ( day ) ; } else { result = m_parent . getHours ( day ) ; } } return result ; }
This method retrieves the calendar hours for the specified day . Note that if this is a derived calendar then this method will refer to the base calendar where no hours are specified in the derived calendar .
134
39
143,792
public void addDefaultCalendarHours ( Day day ) { ProjectCalendarHours hours = addCalendarHours ( day ) ; if ( day != Day . SATURDAY && day != Day . SUNDAY ) { hours . addRange ( DEFAULT_WORKING_MORNING ) ; hours . addRange ( DEFAULT_WORKING_AFTERNOON ) ; } }
This is a convenience method used to add a default set of calendar hours to a calendar .
79
18
143,793
public ProjectCalendarHours addCalendarHours ( Day day ) { ProjectCalendarHours bch = new ProjectCalendarHours ( this ) ; bch . setDay ( day ) ; m_hours [ day . getValue ( ) - 1 ] = bch ; return ( bch ) ; }
Used to add working hours to the calendar . Note that the MPX file definition allows a maximum of 7 calendar hours records to be added to a single calendar .
63
32
143,794
public void attachHoursToDay ( ProjectCalendarHours hours ) { if ( hours . getParentCalendar ( ) != this ) { throw new IllegalArgumentException ( ) ; } m_hours [ hours . getDay ( ) . getValue ( ) - 1 ] = hours ; }
Attaches a pre - existing set of hours to the correct day within the calendar .
60
17
143,795
public void removeHoursFromDay ( ProjectCalendarHours hours ) { if ( hours . getParentCalendar ( ) != this ) { throw new IllegalArgumentException ( ) ; } m_hours [ hours . getDay ( ) . getValue ( ) - 1 ] = null ; }
Removes a set of calendar hours from the day to which they are currently attached .
60
17
143,796
public void setWorkingDay ( Day day , boolean working ) { setWorkingDay ( day , ( working ? DayType . WORKING : DayType . NON_WORKING ) ) ; }
convenience method for setting working or non - working days .
39
13
143,797
public void setWorkingDay ( Day day , DayType working ) { DayType value ; if ( working == null ) { if ( isDerived ( ) ) { value = DayType . DEFAULT ; } else { value = DayType . WORKING ; } } else { value = working ; } m_days [ day . getValue ( ) - 1 ] = value ; }
This is a convenience method provided to allow a day to be set as working or non - working by using the day number to identify the required day .
79
30
143,798
private void readCalendar ( Gantt gantt ) { Gantt . Calendar ganttCalendar = gantt . getCalendar ( ) ; m_projectFile . getProjectProperties ( ) . setWeekStartDay ( ganttCalendar . getWeekStart ( ) ) ; ProjectCalendar calendar = m_projectFile . addCalendar ( ) ; calendar . setName ( "Standard" ) ; m_projectFile . setDefaultCalendar ( calendar ) ; String workingDays = ganttCalendar . getWorkDays ( ) ; calendar . setWorkingDay ( Day . SUNDAY , workingDays . charAt ( 0 ) == ' ' ) ; calendar . setWorkingDay ( Day . MONDAY , workingDays . charAt ( 1 ) == ' ' ) ; calendar . setWorkingDay ( Day . TUESDAY , workingDays . charAt ( 2 ) == ' ' ) ; calendar . setWorkingDay ( Day . WEDNESDAY , workingDays . charAt ( 3 ) == ' ' ) ; calendar . setWorkingDay ( Day . THURSDAY , workingDays . charAt ( 4 ) == ' ' ) ; calendar . setWorkingDay ( Day . FRIDAY , workingDays . charAt ( 5 ) == ' ' ) ; calendar . setWorkingDay ( Day . SATURDAY , workingDays . charAt ( 6 ) == ' ' ) ; for ( int i = 1 ; i <= 7 ; i ++ ) { Day day = Day . getInstance ( i ) ; ProjectCalendarHours hours = calendar . addCalendarHours ( day ) ; if ( calendar . isWorkingDay ( day ) ) { hours . addRange ( ProjectCalendarWeek . DEFAULT_WORKING_MORNING ) ; hours . addRange ( ProjectCalendarWeek . DEFAULT_WORKING_AFTERNOON ) ; } } for ( Gantt . Holidays . Holiday holiday : gantt . getHolidays ( ) . getHoliday ( ) ) { ProjectCalendarException exception = calendar . addCalendarException ( holiday . getDate ( ) , holiday . getDate ( ) ) ; exception . setName ( holiday . getContent ( ) ) ; } }
Read the calendar data from a Gantt Designer file .
468
12
143,799
private void processTasks ( Gantt gantt ) { ProjectCalendar calendar = m_projectFile . getDefaultCalendar ( ) ; for ( Gantt . Tasks . Task ganttTask : gantt . getTasks ( ) . getTask ( ) ) { String wbs = ganttTask . getID ( ) ; ChildTaskContainer parentTask = getParentTask ( wbs ) ; Task task = parentTask . addTask ( ) ; //ganttTask.getB() // bar type //ganttTask.getBC() // bar color task . setCost ( ganttTask . getC ( ) ) ; task . setName ( ganttTask . getContent ( ) ) ; task . setDuration ( ganttTask . getD ( ) ) ; task . setDeadline ( ganttTask . getDL ( ) ) ; //ganttTask.getH() // height //ganttTask.getIn(); // indent task . setWBS ( wbs ) ; task . setPercentageComplete ( ganttTask . getPC ( ) ) ; task . setStart ( ganttTask . getS ( ) ) ; //ganttTask.getU(); // Unknown //ganttTask.getVA(); // Valign task . setFinish ( calendar . getDate ( task . getStart ( ) , task . getDuration ( ) , false ) ) ; m_taskMap . put ( wbs , task ) ; } }
Read task data from a Gantt Designer file .
322
11