src_fm_fc_ms_ff
stringlengths
43
86.8k
target
stringlengths
20
276k
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface {

  /**
   * Validates this step's configuration and appends the findings to {@code remarks}.
   *
   * <p>When a previous row meta is available and non-empty, it reports how many fields are
   * received and checks that every configured output field exists in the incoming stream.
   * It then reports whether the step is receiving input at all, and finally adds a comment
   * that file specifications themselves are not checked.</p>
   *
   * @param remarks    list the check results are appended to
   * @param transMeta  owning transformation (unused here, part of the check contract)
   * @param stepinfo   the step being checked; attached to every {@link CheckResult}
   * @param prev       row meta describing the fields arriving from previous steps, may be null
   * @param input      names of steps feeding into this one; only the count is used
   * @param output     names of steps this one feeds (unused here)
   * @param info       info-stream row meta (unused here)
   * @param space      variable space (unused here)
   * @param repository repository handle (unused here)
   * @param metaStore  metastore handle (unused here)
   */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo,
      RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
      Repository repository, IMetaStore metaStore ) {
    CheckResult cr;
    if ( prev != null && prev.size() > 0 ) {
      cr =
          new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG,
              "XMLOutputMeta.CheckResult.FieldsReceived", "" + prev.size() ), stepinfo );
      remarks.add( cr );

      // Collect missing fields with a StringBuilder instead of repeated String
      // concatenation in a loop; a non-empty buffer doubles as the "error found" flag.
      StringBuilder missingFields = new StringBuilder();
      for ( int i = 0; i < outputFields.length; i++ ) {
        int idx = prev.indexOfValue( outputFields[i].getFieldName() );
        if ( idx < 0 ) {
          missingFields.append( "\t\t" ).append( outputFields[i].getFieldName() ).append( Const.CR );
        }
      }
      if ( missingFields.length() > 0 ) {
        String error_message =
            BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsNotFound", missingFields.toString() );
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepinfo );
        remarks.add( cr );
      } else {
        cr =
            new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG,
                "XMLOutputMeta.CheckResult.AllFieldsFound" ), stepinfo );
        remarks.add( cr );
      }
    }

    if ( input.length > 0 ) {
      cr =
          new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG,
              "XMLOutputMeta.CheckResult.ExpectedInputOk" ), stepinfo );
      remarks.add( cr );
    } else {
      cr =
          new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
              "XMLOutputMeta.CheckResult.ExpectedInputError" ), stepinfo );
      remarks.add( cr );
    }

    // File specifications are deliberately not validated here.
    cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT, BaseMessages.getString( PKG,
            "XMLOutputMeta.CheckResult.FilesNotChecked" ), stepinfo );
    remarks.add( cr );
  }

  // NOTE(review): dataset-style skeleton — the declarations below are signatures with
  // their bodies elided in this extract.
  XMLOutputMeta();
  boolean isDateInFilename();
  void setDateInFilename( boolean dateInFilename );
  String getExtension();
  void setExtension( String extension );
  boolean isDoNotOpenNewFileInit();
  void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit );
  String getFileName();
  void setFileName( String fileName );
  int getSplitEvery();
  void setSplitEvery( int splitEvery );
  boolean isStepNrInFilename();
  void setStepNrInFilename( boolean stepNrInFilename );
  boolean isTimeInFilename();
  void setTimeInFilename( boolean timeInFilename );
  boolean isAddToResultFiles();
  void setAddToResultFiles( boolean addtoresultfilenamesin );
  boolean isSpecifyFormat();
  void setSpecifyFormat( boolean SpecifyFormat );
  String getDateTimeFormat();
  void setDateTimeFormat( String date_time_format );
  boolean isZipped();
  void setZipped( boolean zipped );
  XMLField[] getOutputFields();
  void setOutputFields( XMLField[] outputFields );
  void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore );
  void allocate( int nrfields );
  Object clone();
  String getNewLine( String fformat );
  void setDefault();
  String[] getFiles( VariableSpace space );
  String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive );
  void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore );
  RowMetaInterface getRequiredFields( VariableSpace space );
  String getXML();
  void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases );
  void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step );
  void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore );
  StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans );
  StepDataInterface getStepData();
  String getEncoding();
  void setEncoding( String encoding );
  String getMainElement();
  void setMainElement( String mainElement );
  String getRepeatElement();
  void setRepeatElement( String repeatElement );
  String getNameSpace();
  void setNameSpace( String nameSpace );
  void setOmitNullValues( boolean omitNullValues );
  boolean isOmitNullValues();
  boolean isServletOutput();
  void setServletOutput( boolean servletOutput );
  String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore );
  boolean passDataToServletOutput();
}
/**
 * Exercises XMLOutputMeta#check(): with no incoming fields only the generic
 * "receiving info" and "files not checked" remarks are produced; once the
 * previous row meta reports one field and a matching output field is
 * configured, the fields-received and all-fields-found remarks precede them.
 */
@Test
public void testCheck() throws Exception {
  XMLOutputMeta outputMeta = new XMLOutputMeta();
  outputMeta.setDefault();
  TransMeta transformation = mock( TransMeta.class );
  StepMeta step = mock( StepMeta.class );
  RowMetaInterface prevRow = mock( RowMetaInterface.class );
  Repository repository = mock( Repository.class );
  IMetaStore metaStore = mock( IMetaStore.class );
  RowMetaInterface infoRow = mock( RowMetaInterface.class );
  ArrayList<CheckResultInterface> results = new ArrayList<>();

  // First pass: prevRow.size() is not stubbed yet, so the field checks are skipped.
  outputMeta.check( results, transformation, step, prevRow, new String[] { "input" },
      new String[] { "output" }, infoRow, new Variables(), repository, metaStore );
  assertEquals( 2, results.size() );
  assertEquals( "Step is receiving info from other steps.", results.get( 0 ).getText() );
  assertEquals( "File specifications are not checked.", results.get( 1 ).getText() );

  // Second pass: one configured output field and one incoming field.
  XMLField field = new XMLField();
  field.setFieldName( "aField" );
  field.setType( 1 );
  field.setLength( 10 );
  field.setPrecision( 3 );
  outputMeta.setOutputFields( new XMLField[] { field } );
  when( prevRow.size() ).thenReturn( 1 );
  results.clear();
  outputMeta.check( results, transformation, step, prevRow, new String[] { "input" },
      new String[] { "output" }, infoRow, new Variables(), repository, metaStore );
  assertEquals( 4, results.size() );
  assertEquals( "Step is connected to previous one, receiving 1 fields", results.get( 0 ).getText() );
  assertEquals( "All output fields are found in the input stream.", results.get( 1 ).getText() );
  assertEquals( "Step is receiving info from other steps.", results.get( 2 ).getText() );
  assertEquals( "File specifications are not checked.", results.get( 3 ).getText() );
}
GoogleAnalyticsApiFacade {

  /**
   * Builds a facade wired to a trusted HTTP transport and the default Jackson
   * JSON factory, resolving the OAuth key file path through KettleVFS so
   * VFS-style locations are honoured.
   *
   * @param application         application name passed to the Analytics client
   * @param oauthServiceAccount OAuth service-account e-mail
   * @param oauthKeyFile        path to the service-account key file (VFS-resolvable)
   * @throws GeneralSecurityException if the trusted transport cannot be created
   * @throws IOException              on transport initialisation problems
   * @throws KettleFileException      if the key file path cannot be resolved
   */
  public static GoogleAnalyticsApiFacade createFor( String application, String oauthServiceAccount, String oauthKeyFile )
    throws GeneralSecurityException, IOException, KettleFileException {
    // Keep the original evaluation order: transport first, then key-file resolution.
    HttpTransport transport = GoogleNetHttpTransport.newTrustedTransport();
    JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();
    File keyFile = new File( KettleVFS.getFileObject( oauthKeyFile ).getURL().getPath() );
    return new GoogleAnalyticsApiFacade( transport, jsonFactory, application, oauthServiceAccount, keyFile );
  }

  // NOTE(review): dataset-style skeleton — bodies of the declarations below are elided.
  GoogleAnalyticsApiFacade( HttpTransport httpTransport, JsonFactory jsonFactory, String application, String oathServiceEmail, File keyFile );
  static GoogleAnalyticsApiFacade createFor( String application, String oauthServiceAccount, String oauthKeyFile );
  void close();
  Analytics getAnalytics();
}
// NOTE(review): the name suggests this test expects createFor(...) to throw for a
// non-existing key file, yet no fail()/try-catch is visible and the @Test annotation
// shows no expected-exception argument in this extract — presumably it was stripped;
// confirm against the full test class. 'path' is a fixture field defined elsewhere.
@Test public void exceptionIsThrowsForNonExistingFiles() throws Exception { GoogleAnalyticsApiFacade.createFor( "application-name", "account", path ); }
// GaInputStepDialog.getFields(): runs the preview query (capped at one row) and fills the
// fields table from the returned column headers. DIMENSION columns get a PDI type/format
// keyed off well-known ga: names (ga:date -> Date/yyyyMMdd, visit counters -> Integer,
// lat/long -> Number, everything else String); METRIC columns are typed from the declared
// dataType. Five fixed data-source rows (profile id, webprop id, account name, table id,
// table name) are appended after the headers, all typed String with an empty format.
// NOTE(review): 'i' is only incremented inside the DIMENSION/METRIC branches — a header
// with any other columnType would be silently overwritten by the next row; presumably the
// API only ever returns those two column types, but confirm. The exact getItem/setText
// call sequence is pinned by the companion testGetFields, so the code below is unchanged.
// IOExceptions are unwrapped (GoogleJsonResponseException details preferred) and shown in
// an ErrorDialog rather than propagated.
GaInputStepDialog extends BaseStepDialog implements StepDialogInterface { void getFields() { Analytics.Data.Ga.Get query = getPreviewQuery(); if ( query == null ) { return; } query.setMaxResults( 1 ); try { GaData dataFeed = query.execute(); if ( dataFeed == null || dataFeed.getRows() == null || dataFeed.getRows().size() < 1 ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( "Query yields empty feed" ); mb.setMessage( "The feed did not give any results. Please specify a query that returns data." ); mb.open(); return; } int i = 0; List<GaData.ColumnHeaders> colHeaders = dataFeed.getColumnHeaders(); getTableView().table.setItemCount( colHeaders.size() + dataFeed.getProfileInfo().size() ); for ( GaData.ColumnHeaders colHeader : colHeaders ) { String name = colHeader.getName(); String dataType = colHeader.getDataType(); String columnType = colHeader.getColumnType(); TableItem item = getTableView().table.getItem( i ); if ( columnType.equals( "DIMENSION" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_DIMENSION ); item.setText( 2, name ); item.setText( 3, name ); if ( name.equalsIgnoreCase( "ga:date" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_DATE ) ); item.setText( 5, "yyyyMMdd" ); } else if ( name.equalsIgnoreCase( "ga:daysSinceLastVisit" ) || name.equalsIgnoreCase( "ga:visitLength" ) || name.equalsIgnoreCase( "ga:visitCount" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else if ( name.equalsIgnoreCase( "ga:latitude" ) || name.equalsIgnoreCase( "ga:longitude" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } else if ( columnType.equals( "METRIC" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_METRIC ); item.setText( 2, name ); item.setText( 3, name ); 
// METRIC columns: numeric-looking dataTypes map to Number/Integer, anything else String.
if ( dataType.compareToIgnoreCase( "currency" ) == 0 || dataType.compareToIgnoreCase( "float" ) == 0 || dataType.compareToIgnoreCase( "percent" ) == 0 || dataType.compareToIgnoreCase( "us_currency" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else if ( dataType.compareToIgnoreCase( "time" ) == 0 || dataType.compareToIgnoreCase( "integer" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } } TableItem item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 4, 
ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); getTableView().removeEmptyRows(); getTableView().setRowNums(); getTableView().optWidth( true ); getInput().setChanged(); } catch ( IOException ioe ) { Exception exceptionToDisplay = ioe; if ( ioe instanceof GoogleJsonResponseException ) { GoogleJsonResponseException gjre = (GoogleJsonResponseException) ioe; if ( gjre.getDetails() != null && gjre.getDetails().getMessage() != null ) { exceptionToDisplay = new IOException( gjre.getDetails().getMessage(), gjre ); } } new ErrorDialog( shell, BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogTitle" ), BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogMessage" ), exceptionToDisplay ); } } GaInputStepDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void readGaProfiles(); void readGaSegments(); void getData(); }
/**
 * Verifies the full interaction trace of getFields(): 19 table rows are
 * fetched, header rows are typed per column/data type, and the five fixed
 * data-source rows (3 properties + 2 fields) are appended, all String-typed.
 */
@Test
public void testGetFields() throws Exception {
  dialog.getFields();

  verify( query ).setMaxResults( 1 );
  verify( table, times( 19 ) ).getItem( anyInt() );

  // Column 1: field-type markers.
  verify( tableItem, times( 7 ) ).setText( 1, GaInputStepMeta.FIELD_TYPE_DIMENSION );
  verify( tableItem, times( 7 ) ).setText( 1, GaInputStepMeta.FIELD_TYPE_METRIC );
  verify( tableItem, times( 3 ) ).setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY );
  verify( tableItem, times( 2 ) ).setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD );

  // Columns 2/3: one name + rename pair per header.
  for ( ColumnHeaders h : headers ) {
    verify( tableItem, times( 1 ) ).setText( 2, h.getName() );
    verify( tableItem, times( 1 ) ).setText( 3, h.getName() );
  }

  // Columns 2/3: fixed data-source rows.
  verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID );
  verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID );
  verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID );
  verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID );
  verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME );
  verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME );
  verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID );
  verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID );
  verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME );
  verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME );

  // Column 4: PDI type descriptions per row.
  verify( tableItem, times( 1 ) ).setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_DATE ) );
  verify( tableItem, times( 5 ) ).setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) );
  verify( tableItem, times( 6 ) ).setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) );
  verify( tableItem, times( 7 ) ).setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) );
}
SalesforceConnection {

  /**
   * Stores the records filter and the date window, then validates the window:
   * both bounds must be present, the start must lie strictly before the end,
   * and the window may span at most 30 whole days.
   *
   * @param recordsFilter filter mode to apply (stored as-is)
   * @param startDate     inclusive lower bound of the window, must not be null
   * @param endDate       upper bound of the window, must not be null
   * @throws KettleException when a bound is missing, the bounds are mis-ordered,
   *         or the window exceeds 30 days
   */
  public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException {
    this.startDate = startDate;
    this.endDate = endDate;
    this.recordsFilter = recordsFilter;

    if ( this.startDate == null || this.endDate == null ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) );
    }

    Date windowStart = this.startDate.getTime();
    Date windowEnd = this.endDate.getTime();
    if ( windowStart.compareTo( windowEnd ) >= 0 ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) );
    }

    // Same truncating integer division as before: partial days do not count.
    long dayMillis = 24 * 60 * 60 * 1000;
    long daysBetween = ( windowEnd.getTime() - windowStart.getTime() ) / dayMillis;
    if ( daysBetween > 30 ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) );
    }
  }

  // NOTE(review): dataset-style skeleton — bodies of the declarations below are elided.
  SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password );
  boolean isRollbackAllChangesOnError();
  @Deprecated void rollbackAllChangesOnError( boolean value );
  void setRollbackAllChangesOnError( boolean value );
  boolean isQueryAll();
  @Deprecated void queryAll( boolean value );
  void setQueryAll( boolean value );
  void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate );
  void setSQL( String sql );
  void setFieldsList( String fieldsList );
  void setModule( String module );
  String getURL();
  String getSQL();
  Date getServerTimestamp();
  String getModule();
  QueryResult getQueryResult();
  PartnerConnection createBinding( ConnectorConfig config );
  PartnerConnection getBinding();
  void setTimeOut( int timeout );
  int getTimeOut();
  boolean isUsingCompression();
  void setUsingCompression( boolean useCompression );
  String getUsername();
  void setUsername( String value );
  String getPassword();
  void setPassword( String value );
  void connect();
  void query( boolean specifyQuery );
  void close();
  int getQueryResultSize();
  int getRecordsCount();
  SalesforceRecordValue getRecord( int recordIndex );
  String getRecordValue( SObject con, String fieldname );
  XmlObject[] getElements();
  boolean queryMore();
  String[] getAllAvailableObjects( boolean OnlyQueryableObjects );
  Field[] getObjectFields( String objectName );
  Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields );
  String[] getFields( String objectName );
  String[] getFields( String objectName, boolean excludeNonUpdatableFields );
  String[] getFields( Field[] fields );
  String[] getFields( Field[] fields, boolean excludeNonUpdatableFields );
  UpsertResult[] upsert( String upsertField, SObject[] sfBuffer );
  SaveResult[] insert( SObject[] sfBuffer );
  SaveResult[] update( SObject[] sfBuffer );
  DeleteResult[] delete( String[] id );
  static XmlObject createMessageElement( String name, Object value, boolean useExternalKey );
  static XmlObject fromTemplateElement( String name, Object value, boolean setValue );
  static XmlObject[] getChildren( SObject object );
}
/** setCalendar must reject a null start date. */
@Test
public void testSetCalendarStartNull() throws KettleException {
  SalesforceConnection conn = new SalesforceConnection( logInterface, url, username, password );
  GregorianCalendar end = new GregorianCalendar( 2000, 2, 10 );
  try {
    conn.setCalendar( recordsFilter, null, end );
    fail();
  } catch ( KettleException expected ) {
    // expected: missing start date
  }
}

/** setCalendar must reject a null end date. */
@Test
public void testSetCalendarEndNull() throws KettleException {
  SalesforceConnection conn = new SalesforceConnection( logInterface, url, username, password );
  GregorianCalendar start = new GregorianCalendar( 2000, 2, 10 );
  try {
    conn.setCalendar( recordsFilter, start, null );
    fail();
  } catch ( KettleException expected ) {
    // expected: missing end date
  }
}

/** setCalendar must reject a start date at or after the end date. */
@Test
public void testSetCalendarStartDateTooOlder() throws KettleException {
  SalesforceConnection conn = new SalesforceConnection( logInterface, url, username, password );
  GregorianCalendar start = new GregorianCalendar( 2000, 3, 20 );
  GregorianCalendar end = new GregorianCalendar( 2000, 2, 10 );
  try {
    conn.setCalendar( recordsFilter, start, end );
    fail();
  } catch ( KettleException expected ) {
    // expected: start after end
  }
}

/** setCalendar must reject a window wider than 30 days. */
@Test
public void testSetCalendarDatesTooFarApart() throws KettleException {
  SalesforceConnection conn = new SalesforceConnection( logInterface, url, username, password );
  GregorianCalendar start = new GregorianCalendar( 2000, 1, 1 );
  GregorianCalendar end = new GregorianCalendar( 2000, 2, 11 );
  try {
    conn.setCalendar( recordsFilter, start, end );
    fail();
  } catch ( KettleException expected ) {
    // expected: window exceeds 30 days
  }
}

/**
 * Runs the real setCalendar on a partial mock: a valid 30-day window passes,
 * while a reversed window and either bound being null are rejected.
 */
@Test
public void testSetCalendar() {
  SalesforceConnection conn = mock( SalesforceConnection.class, Mockito.CALLS_REAL_METHODS );
  try {
    conn.setCalendar( new Random().nextInt( SalesforceConnectionUtils.recordsFilterDesc.length ),
        new GregorianCalendar( 2016, Calendar.JANUARY, 1 ), new GregorianCalendar( 2016, Calendar.JANUARY, 31 ) );
  } catch ( KettleException e ) {
    fail();
  }
  try {
    conn.setCalendar( new Random().nextInt( SalesforceConnectionUtils.recordsFilterDesc.length ),
        new GregorianCalendar( 2016, Calendar.JANUARY, 31 ), new GregorianCalendar( 2016, Calendar.JANUARY, 1 ) );
    fail();
  } catch ( KettleException expected ) {
    // expected: reversed window
  }
  try {
    conn.setCalendar( new Random().nextInt( SalesforceConnectionUtils.recordsFilterDesc.length ),
        null, new GregorianCalendar( 2016, Calendar.JANUARY, 31 ) );
    fail();
  } catch ( KettleException expected ) {
    // expected: missing start
  }
  try {
    conn.setCalendar( new Random().nextInt( SalesforceConnectionUtils.recordsFilterDesc.length ),
        new GregorianCalendar( 2016, Calendar.JANUARY, 1 ), null );
    fail();
  } catch ( KettleException expected ) {
    // expected: missing end
  }
}
// KettleDatabaseRepository.insertClusterSlave(): allocates the next cluster-slave id via
// the connection delegate, builds a three-column R_CLUSTER_SLAVE row (new id, cluster
// schema's object id, slave server's object id — in that index order, which the companion
// test asserts), inserts it through the delegate, and returns the new id.
// NOTE(review): neither clusterSchema nor slaveServer is null-checked; a null argument or
// a missing object id would surface as an NPE/null column here — confirm callers guarantee
// both are persisted before linking. The remaining declarations in this dataset-style
// skeleton are signatures with bodies elided; public delegate fields follow at the end.
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ) throws KettleException { ObjectId id = connectionDelegate.getNextClusterSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER ), clusterSchema .getObjectId() ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE ), slaveServer .getObjectId() ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir, String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment, RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId 
id_job, String versionComment, RepositoryDirectoryInterface dir, String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory, RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate, ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers, String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface 
newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory, String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] 
getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId 
id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job ); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta 
getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ); ObjectId 
getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code, DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr, String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode, String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code, Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory, boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? 
extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
@Test
public void testInsertClusterSlave() throws KettleException {
  // Capture exactly what insertClusterSlave() hands to the connection delegate.
  ArgumentCaptor<String> tableNameCaptor = ArgumentCaptor.forClass( String.class );
  ArgumentCaptor<RowMetaAndData> tableDataCaptor = ArgumentCaptor.forClass( RowMetaAndData.class );
  doNothing().when( repo.connectionDelegate ).insertTableRow( tableNameCaptor.capture(), tableDataCaptor.capture() );
  doReturn( new LongObjectId( 357 ) ).when( repo.connectionDelegate ).getNextClusterSlaveID();

  SlaveServer slave = new SlaveServer( "slave1", "fakelocal", "9081", "fakeuser", "fakepass" );
  slave.setObjectId( new LongObjectId( 864 ) );
  ClusterSchema schema = new ClusterSchema( "schema1", Arrays.asList( slave ) );
  schema.setObjectId( new LongObjectId( 159 ) );

  ObjectId insertedId = repo.insertClusterSlave( schema, slave );

  RowMetaAndData row = tableDataCaptor.getValue();
  assertEquals( KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE, tableNameCaptor.getValue() );
  assertEquals( 3, row.size() );
  // Column 0: the id generated by getNextClusterSlaveID().
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 0 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, row.getValueMeta( 0 ).getName() );
  assertEquals( Long.valueOf( 357 ), row.getInteger( 0 ) );
  // Column 1: the cluster schema's object id.
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 1 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, row.getValueMeta( 1 ).getName() );
  assertEquals( Long.valueOf( 159 ), row.getInteger( 1 ) );
  // Column 2: the slave server's object id.
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 2 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, row.getValueMeta( 2 ).getName() );
  assertEquals( Long.valueOf( 864 ), row.getInteger( 2 ) );
  // The generated id is returned to the caller.
  assertEquals( new LongObjectId( 357 ), insertedId );
}
SalesforceConnection {
  /**
   * Lazily creates the Salesforce partner binding for the given connector
   * configuration. Once created, the same binding instance is reused by all
   * subsequent calls; the new config argument is then ignored.
   */
  public PartnerConnection createBinding( ConnectorConfig config ) throws ConnectionException {
    if ( this.binding != null ) {
      return this.binding; // already created — reuse the cached binding
    }
    this.binding = new PartnerConnection( config );
    return this.binding;
  }
  SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
@Test public void testCreateBinding() throws KettleException, ConnectionException { SalesforceConnection conn = new SalesforceConnection( null, "http: ConnectorConfig config = new ConnectorConfig(); config.setAuthEndpoint( Connector.END_POINT ); config.setManualLogin( true ); assertNull( conn.getBinding() ); conn.createBinding( config ); PartnerConnection binding1 = conn.getBinding(); conn.createBinding( config ); PartnerConnection binding2 = conn.getBinding(); assertSame( binding1, binding2 ); }
SalesforceConnection {
  /**
   * Returns the string value of a (possibly dotted) field on the given record,
   * e.g. "Account.Name" walks the relational hierarchy down to the leaf.
   * Returns null when the record is null, the element is absent, or the leaf
   * value is null; a nested QueryResult is rendered as JSON.
   */
  public String getRecordValue( SObject con, String fieldname ) throws KettleException {
    String[] fieldHierarchy = fieldname.split( "\\." );
    if ( con == null ) {
      return null;
    }
    XmlObject element = getMessageElementForHierarchy( con, fieldHierarchy );
    if ( element == null ) {
      return null; // field not present on this record
    }
    Object leafValue = element.getValue();
    if ( leafValue == null ) {
      return null; // e.g. a relational query whose id came back null
    }
    if ( leafValue instanceof QueryResult ) {
      // Sub-query results are flattened into a JSON string.
      return buildJsonQueryResult( (QueryResult) leafValue );
    }
    return String.valueOf( leafValue );
  }
  SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
@Test
public void testGetRecordValue() throws Exception {
  // Real getRecordValue() logic on a mocked connection (no server needed).
  SalesforceConnection conn = mock( SalesforceConnection.class, Mockito.CALLS_REAL_METHODS );
  SObject root = new SObject();
  root.setName( new QName( Constants.PARTNER_SOBJECT_NS, "sObject" ) );

  // Flat field lookup.
  SObject simple = createObject( "field", "value" );
  root.addField( "field", simple );
  assertEquals( "Get value of simple record", "value", conn.getRecordValue( root, "field" ) );

  // Dotted path walks the parent/child hierarchy.
  SObject parent = createObject( "parentField", null );
  root.addField( "parentField", parent );
  SObject child = createObject( "subField", "subValue" );
  parent.addField( "subField", child );
  assertEquals( "Get value of record with hierarchy", "subValue",
    conn.getRecordValue( root, "parentField.subField" ) );

  // A null relational id yields null rather than an exception.
  XmlObject nullObject = new XmlObject( new QName( "nullField" ) );
  root.addField( "nullField", nullObject );
  assertEquals( "Get null value when relational query id is null", null,
    conn.getRecordValue( root, "nullField.childField" ) );
}
// Step metadata for the Salesforce Update step.
// supportsErrorHandling() returns true so rows that fail the remote update can
// be routed to an error-handling hop instead of aborting the transformation.
SalesforceUpdateMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testErrorHandling() {
  // The Salesforce Update step must advertise error-handling support.
  SalesforceStepMeta updateMeta = new SalesforceUpdateMeta();
  assertTrue( updateMeta.supportsErrorHandling() );
}
// Step metadata for the Salesforce Update step.
// getFields() is intentionally a no-op: the visible body adds nothing to the
// row meta `r`, so the step passes its input row layout through unchanged.
SalesforceUpdateMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testGetFields() throws KettleStepException {
  SalesforceUpdateMeta meta = new SalesforceUpdateMeta();
  meta.setDefault();

  // An empty input row stays empty: the Update step adds no fields of its own.
  RowMetaInterface rowMeta = new RowMeta();
  meta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 0, rowMeta.size() );

  // An existing field passes through untouched.
  rowMeta.clear();
  rowMeta.addValueMeta( new ValueMetaString( "testString" ) );
  meta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, rowMeta.size() );
  assertEquals( ValueMetaInterface.TYPE_STRING, rowMeta.getValueMeta( 0 ).getType() );
  assertEquals( "testString", rowMeta.getValueMeta( 0 ).getName() );
}
SalesforceUpdateMeta extends SalesforceStepMeta {
  /**
   * Validates the step configuration: runs the base-class checks, then flags
   * an error remark when input hops are connected (this step expects none) or
   * when no update field mappings are configured.
   */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore );

    // This step writes to Salesforce and expects no incoming info streams.
    boolean hasInput = input != null && input.length > 0;
    remarks.add( hasInput
      ? new CheckResult( CheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInputExpected" ), stepMeta )
      : new CheckResult( CheckResult.TYPE_RESULT_OK,
          BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInput" ), stepMeta ) );

    // At least one field mapping must be present to perform an update.
    remarks.add( getUpdateLookup().length == 0
      ? new CheckResult( CheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoFields" ), stepMeta )
      : new CheckResult( CheckResult.TYPE_RESULT_OK,
          BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.FieldsOk" ), stepMeta ) );
  }
  SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testCheck() {
  SalesforceUpdateMeta meta = new SalesforceUpdateMeta();
  meta.setDefault();

  // Defaults have no field mappings, so check() must produce an error remark.
  List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  boolean sawError = false;
  for ( CheckResultInterface remark : remarks ) {
    sawError = sawError || remark.getType() == CheckResult.TYPE_RESULT_ERROR;
  }
  assertFalse( remarks.isEmpty() );
  assertTrue( sawError );

  // A minimally configured step passes with no error remarks.
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.setUpdateLookup( new String[]{ "SalesforceField" } );
  meta.setUpdateStream( new String[]{ "StreamField" } );
  meta.setUseExternalId( new Boolean[]{ false } );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  sawError = false;
  for ( CheckResultInterface remark : remarks ) {
    sawError = sawError || remark.getType() == CheckResult.TYPE_RESULT_ERROR;
  }
  assertFalse( remarks.isEmpty() );
  assertFalse( sawError );
}
// Step metadata for the Salesforce Update step.
// The default constructor simply delegates to the SalesforceStepMeta base class.
SalesforceUpdateMeta extends SalesforceStepMeta { public SalesforceUpdateMeta() { super(); } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testSalesforceUpdateMeta() throws KettleException {
  // Round-trip every serialized attribute through XML and repository save/load.
  List<String> attributes = new ArrayList<String>( SalesforceMetaTest.getDefaultAttributes() );
  attributes.addAll( Arrays.asList(
    "batchSize", "updateLookup", "updateStream", "useExternalId", "rollbackAllChangesOnError" ) );

  Map<String, String> getterMap = new HashMap<String, String>();
  Map<String, String> setterMap = new HashMap<String, String>();

  // Array-typed attributes need explicit load/save validators.
  Map<String, FieldLoadSaveValidator<?>> validators = new HashMap<String, FieldLoadSaveValidator<?>>();
  validators.put( "updateLookup", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) );
  validators.put( "updateStream", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) );
  validators.put( "useExternalId", new ArrayLoadSaveValidator<Boolean>( new BooleanLoadSaveValidator(), 50 ) );

  LoadSaveTester tester = new LoadSaveTester( SalesforceUpdateMeta.class, attributes, getterMap, setterMap,
    validators, new HashMap<String, FieldLoadSaveValidator<?>>() );
  tester.testRepoRoundTrip();
  tester.testXmlRoundTrip();
}
// Step metadata for the Salesforce Upsert step.
// supportsErrorHandling() returns true so rows that fail the remote upsert can
// be routed to an error-handling hop instead of aborting the transformation.
SalesforceUpsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testErrorHandling() {
  // The Salesforce Upsert step must advertise error-handling support.
  SalesforceStepMeta upsertMeta = new SalesforceUpsertMeta();
  assertTrue( upsertMeta.supportsErrorHandling() );
}
SalesforceUpsertMeta extends SalesforceStepMeta {
  /**
   * Appends the configured Salesforce ID output field (length 18, after
   * variable substitution) to the row layout; leaves the row unchanged when
   * no field name is configured.
   */
  public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
    String resolvedName = space.environmentSubstitute( getSalesforceIDFieldName() );
    if ( Utils.isEmpty( resolvedName ) ) {
      return; // no output field configured — row layout stays as-is
    }
    ValueMetaInterface idMeta = new ValueMetaString( resolvedName );
    idMeta.setLength( 18 ); // 18 = length of a Salesforce record id
    idMeta.setOrigin( name );
    r.addValueMeta( idMeta );
  }
  SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testGetFields() throws KettleStepException {
  SalesforceUpsertMeta meta = new SalesforceUpsertMeta();
  meta.setDefault();

  // Default configuration appends the standard "Id" output field.
  RowMetaInterface rowMeta = new RowMeta();
  meta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, rowMeta.size() );
  assertEquals( "Id", rowMeta.getFieldNames()[0] );

  // A custom ID field name is used verbatim.
  meta.setSalesforceIDFieldName( "id_field" );
  rowMeta.clear();
  meta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, rowMeta.size() );
  assertEquals( "id_field", rowMeta.getFieldNames()[0] );
}
SalesforceUpsertMeta extends SalesforceStepMeta {
  /**
   * Validates the step configuration: runs the base-class checks, then flags
   * an error remark when input hops are connected (this step expects none) or
   * when no upsert field mappings are configured.
   */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore );

    // This step writes to Salesforce and expects no incoming info streams.
    boolean hasInput = input != null && input.length > 0;
    remarks.add( hasInput
      ? new CheckResult( CheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInputExpected" ), stepMeta )
      : new CheckResult( CheckResult.TYPE_RESULT_OK,
          BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInput" ), stepMeta ) );

    // At least one field mapping must be present to perform an upsert.
    remarks.add( getUpdateLookup().length == 0
      ? new CheckResult( CheckResult.TYPE_RESULT_ERROR,
          BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoFields" ), stepMeta )
      : new CheckResult( CheckResult.TYPE_RESULT_OK,
          BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.FieldsOk" ), stepMeta ) );
  }
  SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
/**
 * Verifies SalesforceUpsertMeta.check(): a default (field-less) meta must produce at least one
 * ERROR remark, while a meta with a configured update-lookup/stream mapping must produce remarks
 * but no ERROR among them.
 */
@Test
public void testCheck() {
  SalesforceUpsertMeta upsertMeta = new SalesforceUpsertMeta();
  upsertMeta.setDefault();
  List<CheckResultInterface> checkRemarks = new ArrayList<CheckResultInterface>();

  // Default meta has no update-lookup fields, so check() must flag an error.
  upsertMeta.check( checkRemarks, null, null, null, null, null, null, null, null, null );
  assertFalse( checkRemarks.isEmpty() );
  assertTrue( hasErrorRemark( checkRemarks ) );

  // Once a field mapping is configured, check() must not report any error remark.
  checkRemarks.clear();
  upsertMeta.setDefault();
  upsertMeta.setUsername( "user" );
  upsertMeta.setUpdateLookup( new String[]{ "SalesforceField" } );
  upsertMeta.setUpdateStream( new String[]{ "StreamField" } );
  upsertMeta.setUseExternalId( new Boolean[]{ false } );
  upsertMeta.check( checkRemarks, null, null, null, null, null, null, null, null, null );
  assertFalse( checkRemarks.isEmpty() );
  assertFalse( hasErrorRemark( checkRemarks ) );
}

/** Returns true when any remark in the list carries the ERROR result type. */
private static boolean hasErrorRemark( List<CheckResultInterface> remarks ) {
  for ( CheckResultInterface remark : remarks ) {
    if ( remark.getType() == CheckResult.TYPE_RESULT_ERROR ) {
      return true;
    }
  }
  return false;
}
// Database-backed Kettle repository. The focal method insertTransformationCluster() links a
// transformation to a cluster schema: it asks connectionDelegate for the next R_TRANS_CLUSTER
// primary key, builds a three-column integer row (id, id_transformation, id_cluster) using the
// FIELD_TRANS_CLUSTER_* column-name constants, inserts it into TABLE_R_TRANS_CLUSTER via the
// connection delegate, and returns the freshly allocated id. The remainder of this block is the
// class's public interface (connection/versioning, load/save/delete of repository elements,
// attribute accessors) and its public delegate fields.
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationClusterID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER ), id_cluster ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir, String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment, RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId 
id_job, String versionComment, RepositoryDirectoryInterface dir, String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory, RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate, ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers, String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface 
newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory, String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] 
getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId 
id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job ); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta 
getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ); ObjectId 
getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code, DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr, String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode, String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code, Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory, boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? 
extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
/**
 * Verifies that insertTransformationCluster() writes a single three-column integer row into
 * R_TRANS_CLUSTER (new id, transformation id, cluster id) and returns the id allocated by the
 * connection delegate.
 */
@Test
public void testInsertTransformationCluster() throws KettleException {
  ArgumentCaptor<String> tableNameCaptor = ArgumentCaptor.forClass( String.class );
  ArgumentCaptor<RowMetaAndData> tableRowCaptor = ArgumentCaptor.forClass( RowMetaAndData.class );
  doNothing().when( repo.connectionDelegate ).insertTableRow( tableNameCaptor.capture(), tableRowCaptor.capture() );
  doReturn( new LongObjectId( 123 ) ).when( repo.connectionDelegate ).getNextTransformationClusterID();

  ObjectId insertedId = repo.insertTransformationCluster( new LongObjectId( 456 ), new LongObjectId( 789 ) );

  RowMetaAndData insertedRow = tableRowCaptor.getValue();
  assertEquals( KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER, tableNameCaptor.getValue() );
  assertEquals( 3, insertedRow.size() );
  assertIntegerColumn( insertedRow, 0, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, 123 );
  assertIntegerColumn( insertedRow, 1, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, 456 );
  assertIntegerColumn( insertedRow, 2, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, 789 );
  assertEquals( new LongObjectId( 123 ), insertedId );
}

/** Asserts that the row column at the given index is an integer with the expected name and value. */
private static void assertIntegerColumn( RowMetaAndData row, int index, String expectedName, long expectedValue )
  throws KettleException {
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( index ).getType() );
  assertEquals( expectedName, row.getValueMeta( index ).getName() );
  assertEquals( Long.valueOf( expectedValue ), row.getInteger( index ) );
}
// Step metadata for the Salesforce Upsert step. The focal member here is the no-arg constructor,
// which only delegates to the SalesforceStepMeta super-constructor; the remaining declarations
// are the class's public interface (accessors for upsert configuration, XML/repository
// serialization, and the StepMetaInterface factory methods).
SalesforceUpsertMeta extends SalesforceStepMeta { public SalesforceUpsertMeta() { super(); } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
/**
 * Round-trip (repository and XML) load/save test for SalesforceUpsertMeta: every declared
 * attribute must survive serialization, with array attributes exercised via 50-element
 * randomized array validators.
 */
@Test
public void testSalesforceUpsertMeta() throws KettleException {
  List<String> testedAttributes = new ArrayList<String>( SalesforceMetaTest.getDefaultAttributes() );
  testedAttributes.addAll( Arrays.asList( "upsertField", "batchSize", "salesforceIDFieldName", "updateLookup",
    "updateStream", "useExternalId", "rollbackAllChangesOnError" ) );

  // Array-typed attributes need explicit validators; scalar ones use the tester defaults.
  Map<String, FieldLoadSaveValidator<?>> attributeValidators = new HashMap<String, FieldLoadSaveValidator<?>>();
  attributeValidators.put( "updateLookup", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) );
  attributeValidators.put( "updateStream", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) );
  attributeValidators.put( "useExternalId", new ArrayLoadSaveValidator<Boolean>( new BooleanLoadSaveValidator(), 50 ) );

  LoadSaveTester roundTripTester = new LoadSaveTester( SalesforceUpsertMeta.class, testedAttributes,
    new HashMap<String, String>(), new HashMap<String, String>(), attributeValidators,
    new HashMap<String, FieldLoadSaveValidator<?>>() );
  roundTripTester.testRepoRoundTrip();
  roundTripTester.testXmlRoundTrip();
}
SalesforceUtils {
  /**
   * Derives the field name to set to null for a Salesforce upsert/update.
   *
   * <p>When {@code isUseExtId} is false the input field name is returned unchanged. When it is
   * true, the field is expected to follow the external-key syntax
   * {@code object:externalId/lookupField}: a non-matching name is logged (in debug mode) and
   * returned as-is; a custom-object lookup field (ending with the relationship suffix) is rewritten
   * to its custom-object form; any other lookup field gets an {@code "Id"} suffix appended.
   * The resolved name is logged at debug level before being returned.
   *
   * @param log        channel used for debug-level diagnostics
   * @param field      incoming field name, possibly in external-key syntax
   * @param isUseExtId whether the field uses an external id lookup
   * @return the field name to null out
   */
  public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) {
    // Without external-id handling the name passes through untouched.
    if ( !isUseExtId ) {
      if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", field ) );
      }
      return field;
    }
    // Malformed external-key syntax: warn (debug only) and fall back to the raw field name.
    if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) {
      if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, field ) );
      }
      return field;
    }
    // Extract the lookup-field part after the external-id separator.
    String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 );
    String fieldToNullName;
    if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) {
      // Custom object: swap the relationship suffix (__r) for the custom-object suffix (__c).
      fieldToNullName =
        lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() )
          + CUSTOM_OBJECT_SUFFIX;
    } else {
      // Standard object: the nullable field is the lookup field's Id column.
      fieldToNullName = lookupField + "Id";
    }
    if ( log.isDebug() ) {
      log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
    }
    return fieldToNullName;
  }

  private SalesforceUtils();
  static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId );
}
// Tests for SalesforceUtils.getFieldToNullName(): standard vs. custom objects, with and without
// external-id handling, incorrect external-key syntax fallback, and that the warn/final-name
// messages are logged only when debug logging is enabled. The expected logDebug strings below are
// exact runtime messages and must not be edited. (This span cannot be reformatted here: the first
// physical line ends in the middle of one of those string literals.)
@Test public void testFieldWithExtIdYes_StandartObject() { inputFieldName = "Account:ExtID_AccountId__c/Account"; expectedFieldName = "AccountId"; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); assertEquals( expectedFieldName, fieldToNullName ); } @Test public void testFieldWithExtIdNo_StandartObject() { inputFieldName = "AccountId"; expectedFieldName = "AccountId"; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, false ); assertEquals( expectedFieldName, fieldToNullName ); } @Test public void testFieldWithExtIdYes_CustomObject() { inputFieldName = "ParentObject__c:Name/ParentObjectId__r"; expectedFieldName = "ParentObjectId__c"; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); assertEquals( expectedFieldName, fieldToNullName ); } @Test public void testFieldWithExtIdNo_CustomObject() { inputFieldName = "ParentObjectId__c"; expectedFieldName = "ParentObjectId__c"; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, false ); assertEquals( expectedFieldName, fieldToNullName ); } @Test public void testFieldWithExtIdYesButNameInIncorrectSyntax_StandartObject() { when( logMock.isDebug() ).thenReturn( true ); inputFieldName = "Account"; expectedFieldName = inputFieldName; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); assertEquals( expectedFieldName, fieldToNullName ); } @Test public void testIncorrectExternalKeySyntaxWarnIsLoggedInDebugMode() { when( logMock.isDebug() ).thenReturn( true ); inputFieldName = "AccountId"; verify( logMock, never() ).logDebug( anyString() ); SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); verify( logMock ).logDebug( "The field has incorrect external key syntax: AccountId. Syntax for external key should be : object:externalId/lookupField. Trying to use fieldToNullName=AccountId." 
); } @Test public void testIncorrectExternalKeySyntaxWarnIsNotLoggedInNotDebugMode() { when( logMock.isDebug() ).thenReturn( false ); inputFieldName = "AccountId"; verify( logMock, never() ).logDebug( anyString() ); SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); verify( logMock, never() ).logDebug( "The field has incorrect external key syntax: AccountId. Syntax for external key should be : object:externalId/lookupField. Trying to use fieldToNullName=AccountId." ); } @Test public void testFinalNullFieldNameIsLoggedInDebugMode_StandartObject() { when( logMock.isDebug() ).thenReturn( true ); inputFieldName = "Account:ExtID_AccountId__c/Account"; verify( logMock, never() ).logDebug( anyString() ); SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); verify( logMock ).logDebug( "fieldToNullName=AccountId" ); } @Test public void testFinalNullFieldNameIsLoggedInDebugMode_CustomObject() { when( logMock.isDebug() ).thenReturn( true ); inputFieldName = "ParentObject__c:Name/ParentObjectId__r"; verify( logMock, never() ).logDebug( anyString() ); SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); verify( logMock ).logDebug( "fieldToNullName=ParentObjectId__c" ); }
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE ), id_slave ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_SLAVE, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir, String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment, RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String 
versionComment, RepositoryDirectoryInterface dir, String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory, RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate, ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers, String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, 
String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory, String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( 
ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void 
delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job ); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> 
readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId 
getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code, DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr, String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode, String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code, Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory, boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? 
extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
@Test
public void testInsertTransformationSlave() throws KettleException {
  // Capture exactly what the repository writes via insertTableRow.
  ArgumentCaptor<String> tableNameCaptor = ArgumentCaptor.forClass( String.class );
  ArgumentCaptor<RowMetaAndData> rowCaptor = ArgumentCaptor.forClass( RowMetaAndData.class );
  doNothing().when( repo.connectionDelegate ).insertTableRow( tableNameCaptor.capture(), rowCaptor.capture() );
  doReturn( new LongObjectId( 789 ) ).when( repo.connectionDelegate ).getNextTransformationSlaveID();

  ObjectId insertedId = repo.insertTransformationSlave( new LongObjectId( 456 ), new LongObjectId( 123 ) );

  // The insert must target the trans-slave table and carry exactly three key columns.
  assertEquals( KettleDatabaseRepository.TABLE_R_TRANS_SLAVE, tableNameCaptor.getValue() );
  RowMetaAndData row = rowCaptor.getValue();
  assertEquals( 3, row.size() );

  // Column 0: generated trans-slave ID.
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 0 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, row.getValueMeta( 0 ).getName() );
  assertEquals( Long.valueOf( 789 ), row.getInteger( 0 ) );

  // Column 1: transformation ID passed in.
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 1 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, row.getValueMeta( 1 ).getName() );
  assertEquals( Long.valueOf( 456 ), row.getInteger( 1 ) );

  // Column 2: slave server ID passed in.
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 2 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, row.getValueMeta( 2 ).getName() );
  assertEquals( Long.valueOf( 123 ), row.getInteger( 2 ) );

  // The freshly generated ID is returned to the caller.
  assertEquals( new LongObjectId( 789 ), insertedId );
}
// NOTE(review): condensed focal-class view of SalesforceDeleteMeta — only supportsErrorHandling()
// carries a body here; the remaining members are signature stubs. supportsErrorHandling()
// unconditionally returns true, i.e. the Salesforce Delete step always permits rows to be
// diverted to an error-handling hop.
SalesforceDeleteMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testErrorHandling() {
  // The Salesforce Delete step must advertise support for error handling.
  SalesforceStepMeta stepMeta = new SalesforceDeleteMeta();
  assertTrue( stepMeta.supportsErrorHandling() );
}
// NOTE(review): condensed focal-class view — getFields() has an intentionally empty body,
// so the Delete step contributes no output fields and the incoming row meta passes through
// unchanged. All other members are signature stubs.
SalesforceDeleteMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testGetFields() throws KettleStepException {
  SalesforceDeleteMeta deleteMeta = new SalesforceDeleteMeta();
  deleteMeta.setDefault();

  // An empty incoming row stays empty: the delete step adds no fields of its own.
  RowMetaInterface rowMeta = new RowMeta();
  deleteMeta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 0, rowMeta.size() );

  // Existing incoming fields pass through untouched.
  rowMeta.clear();
  rowMeta.addValueMeta( new ValueMetaString( "testString" ) );
  deleteMeta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, rowMeta.size() );
  assertEquals( ValueMetaInterface.TYPE_STRING, rowMeta.getValueMeta( 0 ).getType() );
  assertEquals( "testString", rowMeta.getValueMeta( 0 ).getName() );
}
// NOTE(review): condensed focal-class view — check() delegates base validation to super.check(),
// then appends one remark: an ERROR ("NoInputExpected") when any input step names are present,
// otherwise an OK ("NoInput"). Presumably the delete step is not meant to have info/input hops
// wired through this array — confirm against the base class. Other members are signature stubs.
SalesforceDeleteMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
@Test
public void testCheck() {
  SalesforceDeleteMeta deleteMeta = new SalesforceDeleteMeta();
  deleteMeta.setDefault();
  List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();

  // With default (empty) settings the check must report at least one error remark.
  deleteMeta.check( remarks, null, null, null, null, null, null, null, null, null );
  boolean sawError = false;
  for ( CheckResultInterface remark : remarks ) {
    sawError |= remark.getType() == CheckResult.TYPE_RESULT_ERROR;
  }
  assertFalse( remarks.isEmpty() );
  assertTrue( sawError );

  // Once a username is supplied the configuration passes without errors.
  remarks.clear();
  deleteMeta.setDefault();
  deleteMeta.setUsername( "user" );
  deleteMeta.check( remarks, null, null, null, null, null, null, null, null, null );
  sawError = false;
  for ( CheckResultInterface remark : remarks ) {
    sawError |= remark.getType() == CheckResult.TYPE_RESULT_ERROR;
  }
  assertFalse( remarks.isEmpty() );
  assertFalse( sawError );
}
// NOTE(review): condensed focal-class view of SalesforceInputMeta — only the no-arg constructor
// carries a body (it simply chains to super()). All other members are signature stubs covering
// the query configuration (query/condition/specifyQuery/queryAll), the optional metadata output
// columns (target URL, module, SQL, timestamp, row number, deletion date) and their enable flags,
// plus XML/repository (de)serialization.
SalesforceInputMeta extends SalesforceStepMeta { public SalesforceInputMeta() { super(); } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, 
ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); static String DATE_TIME_FORMAT; }
@Test
public void testSalesforceInputMeta() throws KettleException {
  // Attributes to round-trip: the shared Salesforce ones plus the input-step specific ones.
  List<String> attributes = new ArrayList<String>( SalesforceMetaTest.getDefaultAttributes() );
  attributes.addAll( Arrays.asList( "inputFields", "condition", "query", "specifyQuery", "includeTargetURL",
    "targetURLField", "includeModule", "moduleField", "includeRowNumber", "includeDeletionDate",
    "deletionDateField", "rowNumberField", "includeSQL", "sqlField", "includeTimestamp", "timestampField",
    "readFrom", "readTo", "recordsFilter", "queryAll", "rowLimit" ) );

  // Accessors that do not follow the get/is bean convention need explicit mappings.
  Map<String, String> getters = new HashMap<String, String>();
  getters.put( "includeTargetURL", "includeTargetURL" );
  getters.put( "includeModule", "includeModule" );
  getters.put( "includeRowNumber", "includeRowNumber" );
  getters.put( "includeDeletionDate", "includeDeletionDate" );
  getters.put( "includeSQL", "includeSQL" );
  getters.put( "sqlField", "getSQLField" );
  getters.put( "includeTimestamp", "includeTimestamp" );
  Map<String, String> setters = new HashMap<String, String>();
  setters.put( "sqlField", "setSQLField" );

  // Custom validators for the non-primitive attributes.
  Map<String, FieldLoadSaveValidator<?>> validators = new HashMap<String, FieldLoadSaveValidator<?>>();
  validators.put( "inputFields",
    new ArrayLoadSaveValidator<SalesforceInputField>( new SalesforceInputFieldLoadSaveValidator(), 50 ) );
  validators.put( "recordsFilter", new RecordsFilterLoadSaveValidator() );

  LoadSaveTester tester = new LoadSaveTester( SalesforceInputMeta.class, attributes, getters, setters,
    validators, new HashMap<String, FieldLoadSaveValidator<?>>() );
  tester.testRepoRoundTrip();
  tester.testXmlRoundTrip();
}
// NOTE(review): condensed focal-class view — getFields() is the only method with a body here.
// It appends, in order: one ValueMeta per configured SalesforceInputField (name is
// environment-substituted; TYPE_NONE is coerced to TYPE_STRING; length/precision/format/
// decimal/group/currency symbols are copied over, origin set to the step name; any creation
// failure is wrapped in a KettleStepException), then the optional metadata columns — each only
// when its include-flag is set AND its field name is non-empty: target URL, module and SQL as
// String(250); timestamp and deletion date as Date; row number as Integer with the default
// integer length. All other members are signature stubs.
SalesforceInputMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int i; for ( i = 0; i < inputFields.length; i++ ) { SalesforceInputField field = inputFields[i]; int type = field.getType(); if ( type == ValueMetaInterface.TYPE_NONE ) { type = ValueMetaInterface.TYPE_STRING; } try { ValueMetaInterface v = ValueMetaFactory.createValueMeta( space.environmentSubstitute( field.getName() ), type ); v.setLength( field.getLength() ); v.setPrecision( field.getPrecision() ); v.setOrigin( name ); v.setConversionMask( field.getFormat() ); v.setDecimalSymbol( field.getDecimalSymbol() ); v.setGroupingSymbol( field.getGroupSymbol() ); v.setCurrencySymbol( field.getCurrencySymbol() ); r.addValueMeta( v ); } catch ( Exception e ) { throw new KettleStepException( e ); } } if ( includeTargetURL && !Utils.isEmpty( targetURLField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( targetURLField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeModule && !Utils.isEmpty( moduleField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( moduleField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeSQL && !Utils.isEmpty( sqlField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( sqlField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeTimestamp && !Utils.isEmpty( timestampField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( timestampField ) ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeRowNumber && !Utils.isEmpty( rowNumberField ) ) { ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( rowNumberField ) ); 
v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeDeletionDate && !Utils.isEmpty( deletionDateField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( deletionDateField ) ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void 
setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); static String DATE_TIME_FORMAT; }
@Test
public void testGetFields() throws KettleStepException {
  SalesforceInputMeta inputMeta = new SalesforceInputMeta();
  inputMeta.setDefault();

  // No configured input fields and no optional columns -> empty output row meta.
  RowMetaInterface rowMeta = new RowMeta();
  inputMeta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 0, rowMeta.size() );

  // One configured field -> exactly one output field.
  inputMeta.setInputFields( new SalesforceInputField[]{ new SalesforceInputField( "field1" ) } );
  rowMeta.clear();
  inputMeta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, rowMeta.size() );

  // Enable every optional metadata column; each must be appended alongside the input field.
  inputMeta.setIncludeDeletionDate( true );
  inputMeta.setDeletionDateField( "DeletionDate" );
  inputMeta.setIncludeModule( true );
  inputMeta.setModuleField( "ModuleName" );
  inputMeta.setIncludeRowNumber( true );
  inputMeta.setRowNumberField( "RN" );
  inputMeta.setIncludeSQL( true );
  inputMeta.setSQLField( "sqlField" );
  inputMeta.setIncludeTargetURL( true );
  inputMeta.setTargetURLField( "Target" );
  inputMeta.setIncludeTimestamp( true );
  inputMeta.setTimestampField( "TS" );
  rowMeta.clear();
  inputMeta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 7, rowMeta.size() );
  assertTrue( rowMeta.indexOfValue( "field1" ) >= 0 );
  assertTrue( rowMeta.indexOfValue( "DeletionDate" ) >= 0 );
  assertTrue( rowMeta.indexOfValue( "ModuleName" ) >= 0 );
  assertTrue( rowMeta.indexOfValue( "RN" ) >= 0 );
  assertTrue( rowMeta.indexOfValue( "sqlField" ) >= 0 );
  assertTrue( rowMeta.indexOfValue( "Target" ) >= 0 );
  assertTrue( rowMeta.indexOfValue( "TS" ) >= 0 );
}
// NOTE(review): condensed focal-class view — check() is the only method with a body here.
// It delegates base validation to super.check(), then appends: an ERROR when any input step
// names are present ("NoInputExpected") else an OK; an ERROR when no input fields are
// configured ("NoFields") else an OK; and one additional ERROR for each optional output that
// is enabled but whose field name is empty (target URL, SQL, module, timestamp, row number,
// deletion date) — i.e. up to six per-field errors. All other members are signature stubs.
SalesforceInputMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getInputFields().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); if ( includeTargetURL() && Utils.isEmpty( getTargetURLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTargetURLField" ), stepMeta ); remarks.add( cr ); } if ( includeSQL() && Utils.isEmpty( getSQLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoSQLField" ), stepMeta ); remarks.add( cr ); } if ( includeModule() && Utils.isEmpty( moduleField ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoModuleField" ), stepMeta ); remarks.add( cr ); } if ( includeTimestamp() && Utils.isEmpty( getTimestampField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTimestampField" ), 
stepMeta ); remarks.add( cr ); } if ( includeRowNumber() && Utils.isEmpty( getRowNumberField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoRowNumberField" ), stepMeta ); remarks.add( cr ); } if ( includeDeletionDate() && Utils.isEmpty( getDeletionDateField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoDeletionDateField" ), stepMeta ); remarks.add( cr ); } } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void 
setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); static String DATE_TIME_FORMAT; }
/**
 * Verifies SalesforceInputMeta.check():
 *  - a default (unconfigured) meta yields error remarks,
 *  - a minimally valid configuration yields none,
 *  - each enabled "include X" flag without its field name yields exactly one error (6 total),
 *  - supplying all field names clears every error.
 *
 * Fix over the original: the duplicated error-scan loop is extracted into
 * countErrorResults(), and the stale errorCount++ that kept accumulating
 * (unasserted) in the final scan is removed.
 */
@Test
public void testCheck() {
  SalesforceInputMeta meta = new SalesforceInputMeta();
  meta.setDefault();
  List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();

  // Default meta (no username) must produce at least one error remark.
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  assertFalse( remarks.isEmpty() );
  assertTrue( countErrorResults( remarks ) > 0 );

  // Minimal valid configuration: username plus one input field -> no errors.
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.setInputFields( new SalesforceInputField[] { new SalesforceInputField( "test" ) } );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  assertFalse( remarks.isEmpty() );
  assertEquals( 0, countErrorResults( remarks ) );

  // Every "include X" flag enabled without its field name -> one error each, 6 total.
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.setIncludeDeletionDate( true );
  meta.setIncludeModule( true );
  meta.setIncludeRowNumber( true );
  meta.setIncludeSQL( true );
  meta.setIncludeTargetURL( true );
  meta.setIncludeTimestamp( true );
  meta.setInputFields( new SalesforceInputField[] { new SalesforceInputField( "test" ) } );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  assertFalse( remarks.isEmpty() );
  assertEquals( 6, countErrorResults( remarks ) );

  // Same flags with all corresponding field names supplied -> no errors.
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.setIncludeDeletionDate( true );
  meta.setDeletionDateField( "delDate" );
  meta.setIncludeModule( true );
  meta.setModuleField( "mod" );
  meta.setIncludeRowNumber( true );
  meta.setRowNumberField( "rownum" );
  meta.setIncludeSQL( true );
  meta.setSQLField( "theSQL" );
  meta.setIncludeTargetURL( true );
  meta.setTargetURLField( "theURL" );
  meta.setIncludeTimestamp( true );
  meta.setTimestampField( "ts_Field" );
  meta.setInputFields( new SalesforceInputField[] { new SalesforceInputField( "test" ) } );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  assertFalse( remarks.isEmpty() );
  assertEquals( 0, countErrorResults( remarks ) );
}

/** Counts the remarks whose type is CheckResult.TYPE_RESULT_ERROR. */
private static int countErrorResults( List<CheckResultInterface> remarks ) {
  int errors = 0;
  for ( CheckResultInterface remark : remarks ) {
    if ( remark.getType() == CheckResult.TYPE_RESULT_ERROR ) {
      errors++;
    }
  }
  return errors;
}
// Focal-class summary (generated): SalesforceInsertMeta step metadata.
// Only supportsErrorHandling() carries a body here — it returns true, opting
// this step into PDI row-level error handling. The remaining members are
// bodiless signature stubs kept for test context; they are not compilable Java.
SalesforceInsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
/** The Salesforce insert step must advertise support for PDI error handling. */
@Test
public void testErrorHandling() {
  SalesforceStepMeta insertMeta = new SalesforceInsertMeta();
  assertTrue( insertMeta.supportsErrorHandling() );
}
// Focal-class summary (generated): only check() carries a body; the other
// members are bodiless signature stubs kept for test context.
// check() first delegates to the base-class checks, then adds two remarks:
// an ERROR when input connections are present ("NoInputExpected", OK when
// there are none), and an ERROR when getUpdateLookup() is empty ("NoFields",
// OK when at least one field mapping exists).
SalesforceInsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void 
readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
/**
 * SalesforceInsertMeta.check(): a default meta produces error remarks;
 * a username plus one complete field mapping produces none.
 */
@Test
public void testCheck() {
  SalesforceInsertMeta meta = new SalesforceInsertMeta();
  meta.setDefault();

  // Default configuration (no username, no field mappings) -> at least one error.
  List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  boolean errorSeen = false;
  for ( CheckResultInterface remark : remarks ) {
    errorSeen = errorSeen || remark.getType() == CheckResult.TYPE_RESULT_ERROR;
  }
  assertFalse( remarks.isEmpty() );
  assertTrue( errorSeen );

  // Username plus one mapped field -> remarks present, but none of them errors.
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.setUpdateLookup( new String[] { "SalesforceField" } );
  meta.setUpdateStream( new String[] { "StreamField" } );
  meta.setUseExternalId( new Boolean[] { false } );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  errorSeen = false;
  for ( CheckResultInterface remark : remarks ) {
    errorSeen = errorSeen || remark.getType() == CheckResult.TYPE_RESULT_ERROR;
  }
  assertFalse( remarks.isEmpty() );
  assertFalse( errorSeen );
}
// Focal-class summary (generated): only getFields() carries a body; the other
// members are bodiless signature stubs kept for test context.
// getFields() appends one String output field to the row meta, named from the
// (variable-substituted) salesforce ID field name, with length 18 and origin
// set to the step name; nothing is added when the resolved name is empty.
SalesforceInsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
/** getFields() appends exactly one ID field, default-named "Id", overridable by the meta. */
@Test
public void testGetFields() throws KettleStepException {
  SalesforceInsertMeta meta = new SalesforceInsertMeta();
  meta.setDefault();

  // Default salesforce-ID field name ("Id") is appended to the row meta.
  RowMetaInterface rowMeta = new RowMeta();
  meta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, rowMeta.size() );
  assertEquals( "Id", rowMeta.getFieldNames()[0] );

  // A custom field name replaces the default.
  meta.setSalesforceIDFieldName( "id_field" );
  rowMeta.clear();
  meta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, rowMeta.size() );
  assertEquals( "id_field", rowMeta.getFieldNames()[0] );
}
// Focal-class summary (generated): only the no-arg constructor carries a body
// here (it simply delegates to the SalesforceStepMeta super constructor); the
// remaining members are bodiless signature stubs kept for test context.
SalesforceInsertMeta extends SalesforceStepMeta { public SalesforceInsertMeta() { super(); } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
/** Round-trips every SalesforceInsertMeta attribute through XML and repository save/load. */
@Test
public void testSalesforceInsertMeta() throws KettleException {
  // Base attributes shared by all Salesforce steps, plus the insert-specific ones.
  List<String> attributes = new ArrayList<String>( SalesforceMetaTest.getDefaultAttributes() );
  attributes.addAll( Arrays.asList( "batchSize", "salesforceIDFieldName", "updateLookup",
    "updateStream", "useExternalId", "rollbackAllChangesOnError" ) );

  // Array-typed attributes need explicit load/save validators.
  Map<String, FieldLoadSaveValidator<?>> validators = new HashMap<String, FieldLoadSaveValidator<?>>();
  validators.put( "updateLookup", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) );
  validators.put( "updateStream", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) );
  validators.put( "useExternalId", new ArrayLoadSaveValidator<Boolean>( new BooleanLoadSaveValidator(), 50 ) );

  LoadSaveTester tester = new LoadSaveTester( SalesforceInsertMeta.class, attributes,
    new HashMap<String, String>(), new HashMap<String, String>(),
    validators, new HashMap<String, FieldLoadSaveValidator<?>>() );
  tester.testRepoRoundTrip();
  tester.testXmlRoundTrip();
}
// Focal-class summary (generated): KettleDatabaseRepository. Only
// insertTransStepCondition() carries a body here — it builds a three-column
// RowMetaAndData (transformation id, step id, condition id, all Integer value
// metas) and hands it to connectionDelegate.insertTableRow() for the
// R_TRANS_STEP_CONDITION table. Everything after that body is a list of
// bodiless signature stubs kept for test context; they are not compilable Java.
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ) throws KettleException { String tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; RowMetaAndData table = new RowMetaAndData(); table .addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP ), id_step ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION ), id_condition ); connectionDelegate.insertTableRow( tablename, table ); } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir, String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment, RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, 
String versionComment, RepositoryDirectoryInterface dir, String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory, RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate, ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers, String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface 
newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory, String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] 
getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId 
id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job ); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta 
getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ); ObjectId 
getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code, DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr, String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode, String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code, Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory, boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? 
extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
/**
 * insertTransStepCondition() must insert one three-column integer row into
 * R_TRANS_STEP_CONDITION via the connection delegate.
 */
@Test
public void testInsertTransStepCondition() throws KettleException {
  // Capture the table name and row handed to the connection delegate.
  ArgumentCaptor<String> tableNameCaptor = ArgumentCaptor.forClass( String.class );
  ArgumentCaptor<RowMetaAndData> rowCaptor = ArgumentCaptor.forClass( RowMetaAndData.class );
  doNothing().when( repo.connectionDelegate ).insertTableRow( tableNameCaptor.capture(), rowCaptor.capture() );

  repo.insertTransStepCondition( new LongObjectId( 234 ), new LongObjectId( 567 ), new LongObjectId( 468 ) );

  assertEquals( KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION, tableNameCaptor.getValue() );
  RowMetaAndData row = rowCaptor.getValue();
  assertEquals( 3, row.size() );

  // Column 0: transformation id.
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 0 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, row.getValueMeta( 0 ).getName() );
  assertEquals( Long.valueOf( 234 ), row.getInteger( 0 ) );

  // Column 1: step id.
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 1 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, row.getValueMeta( 1 ).getName() );
  assertEquals( Long.valueOf( 567 ), row.getInteger( 1 ) );

  // Column 2: condition id.
  assertEquals( ValueMetaInterface.TYPE_INTEGER, row.getValueMeta( 2 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, row.getValueMeta( 2 ).getName() );
  assertEquals( Long.valueOf( 468 ), row.getInteger( 2 ) );
}
// Focal-class summary (generated): only loadAccessData() carries a body; the
// remaining members are bodiless signature stubs kept for test context.
// loadAccessData(): if no connection is selected it selects the first cached
// connection and returns early; otherwise it repopulates the access-type box
// from the selected DatabaseInterface, re-applies the previous access
// selection (falling back to the first access type), loads option data from
// the current databaseMeta (or the database defaults), loads partitioning
// (cluster) data, and finally restores the cached UI state via popCache().
DataHandler extends AbstractXulEventHandler { public void loadAccessData() { getControls(); pushCache(); Object key = connectionBox.getSelectedItem(); if ( key == null ) { key = connectionMap.firstKey(); connectionBox.setSelectedItem( key ); return; } DatabaseInterface database = connectionMap.get( key ); int[] acc = database.getAccessTypeList(); Object accessKey = accessBox.getSelectedItem(); accessBox.removeItems(); for ( int value : acc ) { accessBox.addItem( DatabaseMeta.getAccessTypeDescLong( value ) ); } accessBox.setRows( accessBox.getRows() ); if ( accessKey != null ) { accessBox.setSelectedItem( accessKey ); } if ( accessBox.getSelectedItem() == null ) { accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( acc[0] ) ); } Map<String, String> options = null; if ( this.databaseMeta != null ) { this.databaseMeta.applyDefaultOptions( database ); options = this.databaseMeta.getExtraOptions(); } else { clearOptionsData(); options = database.getDefaultOptions(); } setOptionsData( options ); PartitionDatabaseMeta[] clusterInfo = null; if ( this.databaseMeta != null ) { clusterInfo = this.databaseMeta.getPartitioningInformation(); } setClusterData( clusterInfo ); popCache(); } DataHandler(); void loadConnectionData(); void loadAccessData(); void editOptions( int index ); void clearOptionsData(); void getOptionHelp(); void setDeckChildIndex(); void onPoolingCheck(); void onClusterCheck(); Object getData(); void setData( Object data ); void pushCache(); void popCache(); void onCancel(); void onOK(); void testDatabaseConnection(); void restoreDefaults(); void poolingRowChange( int idx ); void disablePortIfInstancePopulated(); void handleUseSecurityCheckbox(); static final SortedMap<String, DatabaseInterface> connectionMap; static final Map<String, String> connectionNametoID; }
/** Smoke test: loadAccessData() runs cleanly with a mocked connection in the cache. */
@Test
public void testLoadAccessData() throws Exception {
  when( accessBox.getSelectedItem() ).thenReturn( "Native" );

  DatabaseInterface dbInterface = mock( DatabaseInterface.class );
  when( dbInterface.getDefaultDatabasePort() ).thenReturn( 5309 );
  DataHandler.connectionMap.put( "myDb", dbInterface );

  // Invoked twice to exercise repeated loads against the cached connection map
  // (the first call may take the early-return path when nothing is selected).
  dataHandler.loadAccessData();
  dataHandler.loadAccessData();
}
// Focal-class summary (generated): only addFields() carries a body; the
// remaining members are bodiless signature stubs kept for test context.
// addFields(): skips null "Id" fields, recurses into SObject children with a
// dotted prefix (parent.child), and registers leaf fields via addField().
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void getData( SalesforceInputMeta in ); void setPosition(); }
@Test
public void testAddFieldsFromSOQLQuery() throws Exception {
  final Set<String> fields = new LinkedHashSet<>();
  // Adding the same field twice must not produce a duplicate entry.
  XmlObject firstField = createObject( "Field1", VALUE, ObjectType.XMLOBJECT );
  dialog.addFields( "", fields, firstField );
  dialog.addFields( "", fields, firstField );
  assertArrayEquals( "No duplicates", new String[]{"Field1"}, fields.toArray() );
  // A distinct field is appended after the existing one, preserving order.
  XmlObject secondField = createObject( "Field2", VALUE, ObjectType.XMLOBJECT );
  dialog.addFields( "", fields, secondField );
  assertArrayEquals( "Two fields", new String[]{"Field1", "Field2"}, fields.toArray() );
}

@Test
public void testAddFields_nullIdNotAdded() throws Exception {
  final Set<String> fields = new LinkedHashSet<>();
  // An Id field with a null value is a marker and must be skipped.
  XmlObject nullIdField = createObject( "Id", null, ObjectType.XMLOBJECT );
  dialog.addFields( "", fields, nullIdField );
  assertArrayEquals( "Null Id field not added", new String[]{}, fields.toArray() );
}

@Test
public void testAddFields_IdAdded() throws Exception {
  final Set<String> fields = new LinkedHashSet<>();
  // An Id field carrying a real value is collected like any other field.
  XmlObject idField = createObject( "Id", VALUE, ObjectType.XMLOBJECT );
  dialog.addFields( "", fields, idField );
  assertArrayEquals( "Id field added", new String[]{"Id"}, fields.toArray() );
}
RunConfigurationManager implements RunConfigurationService {

  /**
   * Returns the type identifier of every registered run configuration provider,
   * in provider registration order.
   */
  public String[] getTypes() {
    return getRunConfigurationProviders().stream()
      .map( RunConfigurationProvider::getType )
      .toArray( String[]::new );
  }

  RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders );
  @Override List<RunConfiguration> load();
  @Override RunConfiguration load( String name );
  @Override boolean save( RunConfiguration runConfiguration );
  @Override boolean delete( String name );
  @Override void deleteAll();
  String[] getTypes();
  List<String> getNames();
  List<String> getNames( String type );
  RunConfiguration getRunConfigurationByType( String type );
  RunConfigurationExecutor getExecutor( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders();
  RunConfigurationProvider getDefaultRunConfigurationProvider();
  void setDefaultRunConfigurationProvider( RunConfigurationProvider defaultRunConfigurationProvider );
}
@Test
public void testGetTypes() {
  // Both built-in provider types must be reported.
  List<String> types = Arrays.asList( executionConfigurationManager.getTypes() );
  assertTrue( types.contains( DefaultRunConfiguration.TYPE ) );
  assertTrue( types.contains( SparkRunConfiguration.TYPE ) );
}
RunConfigurationManager implements RunConfigurationService {

  /**
   * Loads every run configuration from all registered providers, sorted
   * case-insensitively by name with the default configuration pinned first.
   *
   * @return all known run configurations, default configuration first
   */
  @Override
  public List<RunConfiguration> load() {
    List<RunConfiguration> runConfigurations = new ArrayList<>();
    for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) {
      runConfigurations.addAll( runConfigurationProvider.load() );
    }
    // FIX: the previous comparator returned 1 whenever o2 carried the default
    // config name, with no symmetric check on o1. That breaks the Comparator
    // contract (sgn(compare(x, y)) must equal -sgn(compare(y, x))), which lets
    // TimSort throw "Comparison method violates its general contract!", and it
    // did not reliably sort the default configuration first either.
    Collections.sort( runConfigurations, ( o1, o2 ) -> {
      boolean o1IsDefault = o1.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME );
      boolean o2IsDefault = o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME );
      if ( o1IsDefault != o2IsDefault ) {
        // Exactly one of the two is the default configuration: it sorts first.
        return o1IsDefault ? -1 : 1;
      }
      return o1.getName().compareToIgnoreCase( o2.getName() );
    } );
    return runConfigurations;
  }

  RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders );
  @Override List<RunConfiguration> load();
  @Override RunConfiguration load( String name );
  @Override boolean save( RunConfiguration runConfiguration );
  @Override boolean delete( String name );
  @Override void deleteAll();
  String[] getTypes();
  List<String> getNames();
  List<String> getNames( String type );
  RunConfiguration getRunConfigurationByType( String type );
  RunConfigurationExecutor getExecutor( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders();
  RunConfigurationProvider getDefaultRunConfigurationProvider();
  void setDefaultRunConfigurationProvider( RunConfigurationProvider defaultRunConfigurationProvider );
}
@Test
public void testLoad() {
  // The three registered providers contribute three configurations in total.
  // (JUnit convention: expected value first.)
  assertEquals( 3, executionConfigurationManager.load().size() );
}

@Test
public void testLoadByName() {
  DefaultRunConfiguration defaultRunConfiguration =
    (DefaultRunConfiguration) executionConfigurationManager.load( "Default Configuration" );
  assertNotNull( defaultRunConfiguration );
  assertEquals( "Default Configuration", defaultRunConfiguration.getName() );
}
RunConfigurationManager implements RunConfigurationService {

  /**
   * Returns the names of all run configurations from all registered providers,
   * sorted case-insensitively with the default configuration name pinned first.
   *
   * @return all known run configuration names, default configuration first
   */
  public List<String> getNames() {
    List<String> names = new ArrayList<>();
    for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) {
      names.addAll( runConfigurationProvider.getNames() );
    }
    // FIX: the previous comparator returned 1 whenever o2 was the default config
    // name, with no symmetric check on o1 — an asymmetric comparator that
    // violates the Comparator contract (TimSort may throw "Comparison method
    // violates its general contract!") and did not reliably pin the default
    // name to the front of the list.
    Collections.sort( names, ( o1, o2 ) -> {
      boolean o1IsDefault = o1.equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME );
      boolean o2IsDefault = o2.equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME );
      if ( o1IsDefault != o2IsDefault ) {
        // Exactly one of the two is the default configuration name: it sorts first.
        return o1IsDefault ? -1 : 1;
      }
      return o1.compareToIgnoreCase( o2 );
    } );
    return names;
  }

  RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders );
  @Override List<RunConfiguration> load();
  @Override RunConfiguration load( String name );
  @Override boolean save( RunConfiguration runConfiguration );
  @Override boolean delete( String name );
  @Override void deleteAll();
  String[] getTypes();
  List<String> getNames();
  List<String> getNames( String type );
  RunConfiguration getRunConfigurationByType( String type );
  RunConfigurationExecutor getExecutor( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders();
  RunConfigurationProvider getDefaultRunConfigurationProvider();
  void setDefaultRunConfigurationProvider( RunConfigurationProvider defaultRunConfigurationProvider );
}
@Test
public void testGetNames() {
  List<String> names = executionConfigurationManager.getNames();
  // Every known configuration name must be present, including the default.
  for ( String expected : new String[] {
      DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME,
      "Default Configuration",
      "Spark Configuration" } ) {
    assertTrue( names.contains( expected ) );
  }
}
RunConfigurationManager implements RunConfigurationService {

  /**
   * Looks up the provider registered for {@code type} and returns its
   * configuration, or {@code null} when no provider handles that type.
   */
  public RunConfiguration getRunConfigurationByType( String type ) {
    RunConfigurationProvider provider = getProvider( type );
    return provider == null ? null : provider.getConfiguration();
  }

  RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders );
  @Override List<RunConfiguration> load();
  @Override RunConfiguration load( String name );
  @Override boolean save( RunConfiguration runConfiguration );
  @Override boolean delete( String name );
  @Override void deleteAll();
  String[] getTypes();
  List<String> getNames();
  List<String> getNames( String type );
  RunConfiguration getRunConfigurationByType( String type );
  RunConfigurationExecutor getExecutor( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders();
  RunConfigurationProvider getDefaultRunConfigurationProvider();
  void setDefaultRunConfigurationProvider( RunConfigurationProvider defaultRunConfigurationProvider );
}
@Test
public void testGetRunConfigurationByType() {
  // Each type resolves to a non-null configuration of the matching concrete class
  // (the casts also verify the runtime type).
  assertNotNull(
    (DefaultRunConfiguration) executionConfigurationManager.getRunConfigurationByType( DefaultRunConfiguration.TYPE ) );
  assertNotNull(
    (SparkRunConfiguration) executionConfigurationManager.getRunConfigurationByType( SparkRunConfiguration.TYPE ) );
}
RunConfigurationManager implements RunConfigurationService {

  /**
   * Looks up the provider registered for {@code type} and returns its executor,
   * or {@code null} when no provider handles that type.
   */
  public RunConfigurationExecutor getExecutor( String type ) {
    RunConfigurationProvider provider = getProvider( type );
    return provider == null ? null : provider.getExecutor();
  }

  RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders );
  @Override List<RunConfiguration> load();
  @Override RunConfiguration load( String name );
  @Override boolean save( RunConfiguration runConfiguration );
  @Override boolean delete( String name );
  @Override void deleteAll();
  String[] getTypes();
  List<String> getNames();
  List<String> getNames( String type );
  RunConfiguration getRunConfigurationByType( String type );
  RunConfigurationExecutor getExecutor( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders( String type );
  List<RunConfigurationProvider> getRunConfigurationProviders();
  RunConfigurationProvider getDefaultRunConfigurationProvider();
  void setDefaultRunConfigurationProvider( RunConfigurationProvider defaultRunConfigurationProvider );
}
@Test
public void testGetExecutor() {
  // The default type must yield a non-null executor of the expected concrete class.
  RunConfigurationExecutor executor = executionConfigurationManager.getExecutor( DefaultRunConfiguration.TYPE );
  assertNotNull( (DefaultRunConfigurationExecutor) executor );
}
// NOTE(review): extraction residue — the `class` keyword and imports are not visible
// in this chunk; the trailing member list is a declaration skeleton.
RunConfigurationRunExtensionPoint implements ExtensionPointInterface {

  /**
   * Resolves the run configuration named in the execution configuration and hands
   * execution to that configuration's executor.
   *
   * @param logChannelInterface log channel (unused in the visible body)
   * @param o expected to be an {@code Object[]} of
   *          { ExecutionConfiguration, AbstractMeta, VariableSpace } — TODO confirm
   *          against the extension-point caller
   * @throws KettleException when the named run configuration cannot be found in
   *         either the manager or the meta's embedded metastore
   */
  @Override
  public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException {
    ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ];
    AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ];
    VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ];
    EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore();
    RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() );
    if ( runConfiguration == null ) {
      // Not known to the manager: fall back to configurations embedded in the
      // meta's own metastore.
      RunConfigurationManager embeddedRunConfigurationManager =
        EmbeddedRunConfigurationManager.build( embeddedMetaStore );
      runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() );
    }
    if ( runConfiguration != null ) {
      RunConfigurationExecutor runConfigurationExecutor =
        runConfigurationManager.getExecutor( runConfiguration.getType() );
      if ( runConfigurationExecutor != null ) {
        runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace );
      }
      // NOTE(review): a resolved configuration whose type has no registered
      // executor is silently skipped here — confirm that is intended.
    } else {
      String name = "";
      // Only transformations expose a filename for the error message; jobs and
      // other spaces report an empty name.
      if ( variableSpace instanceof TransMeta ) {
        name = ( (TransMeta) variableSpace ).getFilename();
      }
      // The literal "{0}" is passed through as a message parameter — presumably a
      // placeholder consumed by a later formatting pass; verify the message bundle.
      throw new KettleException( BaseMessages
        .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name,
          executionConfiguration.getRunConfiguration(), "{0}" ) );
    }
  }

  RunConfigurationRunExtensionPoint( RunConfigurationManager runConfigurationManager );
  @Override void callExtensionPoint( LogChannelInterface logChannelInterface, Object o );
}
@Test
public void testCallExtensionPoint() throws Exception {
  // A resolvable configuration is dispatched to its executor with the same payload.
  Object[] payload = new Object[] { transExecutionConfiguration, abstractMeta, variableSpace };
  runConfigurationRunExtensionPoint.callExtensionPoint( log, payload );
  verify( runConfigurationExecutor )
    .execute( runConfiguration, transExecutionConfiguration, abstractMeta, variableSpace );
}

@Test
public void testCallExtensionPointEmbedded() throws Exception {
  // With the manager unable to resolve the configuration, the embedded lookup
  // also fails and the extension point must throw.
  when( runConfigurationManager.load( "RUN_CONF" ) ).thenReturn( null );
  try {
    runConfigurationRunExtensionPoint.callExtensionPoint(
      log, new Object[] { transExecutionConfiguration, abstractMeta, variableSpace } );
    fail();
  } catch ( Exception ignored ) {
    // expected: the named run configuration cannot be found anywhere
  }
}
KettleDatabaseRepositoryCreationHelper { public synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun ) throws KettleException { RowMetaInterface table; String sql; String tablename; String schemaTable; String indexname; String[] keyfield; String[] user, pass, code, desc; int KEY = 9; log.logBasic( "Starting to create or modify the repository tables..." ); String message = ( upgrade ? "Upgrading " : "Creating" ) + " the Kettle repository..."; if ( monitor != null ) { monitor.beginTask( message, 31 ); } repository.connectionDelegate.setAutoCommit( true ); table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_REPOSITORY_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_REP_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_DATE ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_OPERATION_DESC, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( 
KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( !dryrun ) { repository.insertLogEntry( ( upgrade ? "Upgrade" : "Creation" ) + " of the Kettle repository" ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_VERSION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VERSION_IS_UPGRADE, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, false ); boolean create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } try { LongObjectId nextId; if ( sql.toUpperCase().indexOf( "CREATE TABLE" ) < 0 ) { nextId = repository.connectionDelegate.getNextID( schemaTable, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION ); } else { nextId = new LongObjectId( 1L ); } Object[] data = new Object[] { nextId.longValue(), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MAJOR_VERSION ), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MINOR_VERSION ), new Date(), Boolean.valueOf( upgrade ), }; if ( dryrun ) { sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_VERSION, table, data, null ); statements.add( sql ); } else { database.execStatement( "INSERT INTO " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION ) + " VALUES(?, ?, ?, ?, ?)", table, data ); } } catch ( KettleException e ) { throw new KettleException( "Unable to insert new version log record into " + schemaTable, e ); } boolean ok_database_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL 
statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_database_type ) { updateDatabaseTypes( statements, dryrun, create ); } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_database_contype = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } ok_database_contype = sql.toUpperCase().contains( "CREATE TABLE" ); if ( ok_database_contype ) { code = DatabaseMeta.dbAccessTypeCode; desc = DatabaseMeta.dbAccessTypeDesc; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextDatabaseConnectionTypeID(); } Object[] tableData = new Object[] { new LongObjectId( nextid ).longValue(), code[i], desc[i], }; if ( dryrun ) { sql = database.getSQLOutput( null, tablename, table, tableData, null ); statements.add( sql ); } else { database.setValuesInsert( table, tableData ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( 
KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_WIDTH, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_HEIGHT, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_FONT_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_FONT_SIZE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_BOLD, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_ITALIC, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_DRAW_SHADOW, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( 
"executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_PORT, 7, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.IDX_R_DATABASE_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DIRECTORY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_DIRECTORY; keyfield = new String[] { KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, 
KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANSFORMATION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ, KEY, 0 ) ); 
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD, 1, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( 
KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( database.checkTableExists( schemaTable ) ) { sql = "SELECT * FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; List<Object[]> rows = database.getRows( sql, 1 ); if ( rows != null && rows.size() > 0 ) { sql = "UPDATE " + schemaTable + " SET " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + "=0 WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; statements.add( sql ); if ( !dryrun ) { database.execStatement( sql ); } } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( 
new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_TRANS_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_JOB_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } 
database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DEPENDENCY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_TABLE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_FIELD_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table .addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } }  // closes the trailing "is OK." log call and the if/else of the preceding (R_PARTITION_SCHEMA) table check

// --- R_PARTITION: one row per partition id inside a partition schema ---------
if ( monitor != null ) { monitor.worked( 1 ); }
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_PARTITION;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); }

// Expected layout: integer PK, FK to R_PARTITION_SCHEMA, and the partition id string.
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, KEY, 0 ) );
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION_SCHEMA, KEY, 0 ) );
table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_PARTITION_ID, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );

// getDDL() returns empty when the physical table already matches the expected
// layout; otherwise it returns the CREATE/ALTER statements needed.
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, false );
if ( !Utils.isEmpty( sql ) ) {
  statements.add( sql );  // always collected, so a dry run can report them
  if ( !dryrun ) {  // a dry run only collects statements, it never executes
    if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); }
    database.execStatements( sql );
    if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); }
  }
} else {
  // Table is up to date; the closing ");" of this log call is on the next source line.
  if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
); } }  // closes the trailing "is OK." log call and the if/else of the preceding (R_PARTITION) table check

// --- R_TRANS_PARTITION_SCHEMA: links transformations to partition schemas ----
if ( monitor != null ) { monitor.worked( 1 ); }
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); }

// Expected layout: integer PK plus the two foreign keys of the link.
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, KEY, 0 ) );
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, KEY, 0 ) );
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) );

// Empty DDL means the physical table already matches this layout.
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, false );
if ( !Utils.isEmpty( sql ) ) {
  statements.add( sql );  // always collected, so a dry run can report them
  if ( !dryrun ) {  // a dry run only collects statements, it never executes
    if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); }
    database.execStatements( sql );
    if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); }
  }
} else {
  // Table is up to date; the closing ");" of this log call is on the next source line.
  if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, 0, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_WEB_APP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NON_PROXY_HOSTS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_SLAVE_MASTER ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { 
log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } }  // closes the trailing "is OK." log call and the if/else of the preceding (R_CLUSTER_SLAVE) table check

// --- R_TRANS_SLAVE: links transformations to slave servers -------------------
if ( monitor != null ) { monitor.worked( 1 ); }
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_SLAVE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); }

// Expected layout: integer PK plus the two foreign keys of the link.
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, KEY, 0 ) );
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, KEY, 0 ) );
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, KEY, 0 ) );

// Empty DDL means the physical table already matches this layout.
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, false );
if ( !Utils.isEmpty( sql ) ) {
  statements.add( sql );  // always collected, so a dry run can report them
  if ( !dryrun ) {  // a dry run only collects statements, it never executes
    if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); }
    database.execStatements( sql );
    if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); }
  }
} else {
  // Table is up to date; the closing ");" of this log call is on the next source line.
  if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
); } }  // closes the trailing "is OK." log call and the if/else of the preceding (R_TRANS_SLAVE) table check

// --- R_TRANS_CLUSTER: links transformations to cluster schemas ---------------
if ( monitor != null ) { monitor.worked( 1 ); }
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); }

// Expected layout: integer PK plus the two foreign keys of the link.
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, KEY, 0 ) );
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, KEY, 0 ) );
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, KEY, 0 ) );

// Empty DDL means the physical table already matches this layout.
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, false );
if ( !Utils.isEmpty( sql ) ) {
  statements.add( sql );  // always collected, so a dry run can report them
  if ( !dryrun ) {  // a dry run only collects statements, it never executes
    if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); }
    database.execStatements( sql );
    if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); }
  }
} else {
  // Table is up to date; the closing ");" of this log call is on the next source line.
  if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANS_HOP_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CONDITION_NEGATED, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_OPERATOR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_LEFT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_CONDITION_FUNCTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_RIGHT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_VALUE_RIGHT, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_VALUE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_TYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_STR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VALUE_IS_NULL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_step_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, "ID_STEP_TYPE", false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_step_type ) { updateStepTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_COPIES, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ID_STEP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( 
log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_STEP_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, 
KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() 
) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_loglevel = true; tablename = KettleDatabaseRepository.TABLE_R_LOGLEVEL; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_loglevel ) { code = LogLevel.logLogLevelCodes(); desc = LogLevel.getLogLevelDescriptions(); if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 1; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ) + " FROM " + schemaTable + " WHERE " + database.getDatabaseMeta().quoteField( "CODE" ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i ); if ( !create ) { nextid = repository.connectionDelegate.getNextLoglevelID(); } RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION ), desc[i] ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData.getRowMeta(), tableData.getData() ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_NAME, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_LOGTYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILENAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILEEXTENTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_DATE, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_TIME, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOG_ID_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, 0, 
0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, 0, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_jobentry_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_jobentry_type ) { updateJobEntryTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_COPY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 4, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM, 13, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( 
schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( 
!dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NAMESPACE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, ( database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH ), 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_NAME, getRepoStringLength(), 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_NAME, getRepoStringLength(), 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_KEY, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_VALUE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } Map<String, ObjectId> users = new Hashtable<String, ObjectId>(); boolean ok_user = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_USER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_USER_ID_USER, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_user ) { user = new String[] { "admin", "guest" }; pass = new String[] { "admin", "guest" }; code = new String[] { "Administrator", "Guest account" }; desc = new String[] { "User manager", "Read-only guest account" }; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < user.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_USER_LOGIN ) + " = '" + user[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextUserID(); } String password = Encr.encryptPassword( pass[i] ); RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN ), user[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD ), password ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION ), desc[i] ); tableData.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED ), Boolean.TRUE ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData ); database.insertRow(); } users.put( user[i], nextid ); } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != 
null ) { monitor.worked( 1 ); } if ( monitor != null ) { monitor.done(); } log.logBasic( ( upgrade ? "Upgraded" : "Created" ) + " " + KettleDatabaseRepository.repositoryTableNames.length + " repository tables." ); } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun ); List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ); List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ); void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ); }
/**
 * Verifies that index names generated for the repository schema fit Oracle's
 * 30-character identifier limit (PluginRegistry initialized with plugin scanning on).
 */
@Test
public void testCreateIndexLenghts() throws KettleException {
  assertIndexLengthsWithinOracleLimit( true );
}

/**
 * Same check as {@link #testCreateIndexLenghts()} but with the PluginRegistry
 * initialized with plugin scanning off. Renamed from a duplicate
 * {@code testCreateIndexLenghts()} declaration, which did not compile.
 */
@Test
public void testCreateIndexLenghtsNoScanInit() throws KettleException {
  assertIndexLengthsWithinOracleLimit( false );
}

/**
 * Shared body: mocks an Oracle-backed repository, generates the repository schema DDL
 * in dry-run mode, and asserts every generated (index) statement is at most 30 characters.
 *
 * @param initRegistryWithScan value forwarded to {@code PluginRegistry.init(...)}
 * @throws KettleException if schema generation fails
 */
private void assertIndexLengthsWithinOracleLimit( boolean initRegistryWithScan ) throws KettleException {
  DatabaseMeta meta = mock( DatabaseMeta.class );
  when( meta.getStartQuote() ).thenReturn( "" );
  when( meta.getEndQuote() ).thenReturn( "" );
  // Return the bare table name for any schema/table combination.
  when( meta.getQuotedSchemaTableCombination( anyString(), anyString() ) ).thenAnswer( new Answer<String>() {
    @Override
    public String answer( InvocationOnMock invocation ) throws Throwable {
      return invocation.getArguments()[1].toString();
    }
  } );
  when( meta.getDatabaseInterface() ).thenReturn( new OracleDatabaseMeta() );

  Database db = mock( Database.class );
  when( db.getDatabaseMeta() ).thenReturn( meta );
  // CREATE TABLE statements come back commented out ("#...") so only index statements are inspected below.
  when( db.getDDL( anyString(), any( RowMetaInterface.class ), anyString(), anyBoolean(), anyString(),
      anyBoolean() ) ).thenReturn( "### CREATE TABLE;" );
  when( repository.getDatabase() ).thenReturn( db );
  when( repository.getDatabaseMeta() ).thenReturn( meta );
  when( db.getCreateIndexStatement( anyString(), anyString(), any( String[].class ), anyBoolean(), anyBoolean(),
      anyBoolean(), anyBoolean() ) ).thenAnswer( lan );

  KettleDatabaseRepositoryCreationHelper helper = new KettleDatabaseRepositoryCreationHelper( repository );

  PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() );
  PluginRegistry.init( initRegistryWithScan );
  String passwordEncoderPluginID =
      Const.NVL( EnvUtil.getSystemProperty( Const.KETTLE_PASSWORD_ENCODER_PLUGIN ), "Kettle" );
  Encr.init( passwordEncoderPluginID );

  List<String> statements = new ArrayList<String>();
  helper.createRepositorySchema( null, false, statements, true );

  for ( String st : statements ) {
    if ( st == null || st.startsWith( "#" ) ) {
      continue; // skip commented-out DDL
    }
    // Message shown on failure, so it must describe the failing condition.
    assertTrue( "Index name is overlength!: " + st, st.length() <= 30 );
  }
}
RunConfigurationImportExtensionPoint implements ExtensionPointInterface {

  /**
   * Copies every run configuration found in the imported meta's embedded metastore into
   * the local run configuration manager, skipping the built-in default configuration.
   *
   * @param logChannelInterface log channel (not used here)
   * @param o the imported {@link AbstractMeta} carrying an embedded metastore
   * @throws KettleException if loading or saving a run configuration fails
   */
  @Override
  public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException {
    final AbstractMeta importedMeta = (AbstractMeta) o;
    final EmbeddedMetaStore metaStore = importedMeta.getEmbeddedMetaStore();
    RunConfigurationManager embeddedManager = EmbeddedRunConfigurationManager.build( metaStore );
    for ( RunConfiguration candidate : embeddedManager.load() ) {
      // Never overwrite the default configuration with an imported one.
      boolean isDefault = candidate.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME );
      if ( !isDefault ) {
        runConfigurationManager.save( candidate );
      }
    }
  }

  RunConfigurationImportExtensionPoint( RunConfigurationManager runConfigurationManager );
  @Override void callExtensionPoint( LogChannelInterface logChannelInterface, Object o );
}
// Smoke test: invoking the extension point must read run configurations from the
// meta's embedded metastore.
@Test
public void testCallExtensionPoint() throws Exception {
  runConfigurationImportExtensionPoint.callExtensionPoint( log, abstractMeta );
  // The embedded metastore is the source the import reads from.
  verify( abstractMeta ).getEmbeddedMetaStore();
}
DefaultRunConfigurationExecutor implements RunConfigurationExecutor {

  /**
   * Applies a default (local / remote / clustered) run configuration to the given
   * execution configuration, then clears all engine-selection variables so that no
   * alternative engine (e.g. Spark/AEL) is picked up.
   *
   * @param runConfiguration the {@link DefaultRunConfiguration} to apply
   * @param executionConfiguration trans or job execution configuration to mutate
   * @param meta the transformation/job metadata
   * @param variableSpace variable space whose engine.* variables are reset
   * @throws KettleException if configuring the execution fails
   */
  @Override
  public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
                       AbstractMeta meta, VariableSpace variableSpace ) throws KettleException {
    DefaultRunConfiguration config = (DefaultRunConfiguration) runConfiguration;
    if ( executionConfiguration instanceof TransExecutionConfiguration ) {
      configureTransExecution( (TransExecutionConfiguration) executionConfiguration, config, variableSpace, meta );
    }
    if ( executionConfiguration instanceof JobExecutionConfiguration ) {
      configureJobExecution( (JobExecutionConfiguration) executionConfiguration, config, variableSpace, meta );
    }
    // Null out every engine variable, in the same order the original code did.
    for ( String engineVariable
        : new String[] { "engine", "engine.remote", "engine.protocol", "engine.host", "engine.port" } ) {
      variableSpace.setVariable( engineVariable, null );
    }
  }

  @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace );
}
// Local run configuration must leave the transformation executing locally.
@Test
public void testExecuteLocalTrans() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( true );
  TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration();
  defaultRunConfigurationExecutor
    .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace );
  assertTrue( transExecutionConfiguration.isExecutingLocally() );
}

// Remote run configuration resolves the named slave server and switches to remote execution.
@Test
public void testExecuteRemoteTrans() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( false );
  defaultRunConfiguration.setRemote( true );
  defaultRunConfiguration.setServer( "Test Server" );
  TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration();
  // The executor looks the server up by its configured name.
  doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( "Test Server" );
  defaultRunConfigurationExecutor
    .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace );
  assertFalse( transExecutionConfiguration.isExecutingLocally() );
  assertTrue( transExecutionConfiguration.isExecutingRemotely() );
  assertEquals( transExecutionConfiguration.getRemoteServer(), slaveServer );
}

// Pentaho-engine configuration disables both local and remote flags on the trans config.
@Test
public void testExecutePentahoTrans() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( false );
  defaultRunConfiguration.setPentaho( true );
  defaultRunConfiguration.setRemote( false );
  TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration();
  defaultRunConfigurationExecutor
    .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace );
  assertFalse( transExecutionConfiguration.isExecutingLocally() );
  assertFalse( transExecutionConfiguration.isExecutingRemotely() );
}

// Clustered configuration sets only the clustered flag.
@Test
public void testExecuteClusteredTrans() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( false );
  defaultRunConfiguration.setRemote( false );
  defaultRunConfiguration.setClustered( true );
  TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration();
  defaultRunConfigurationExecutor
    .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace );
  assertTrue( transExecutionConfiguration.isExecutingClustered() );
  assertFalse( transExecutionConfiguration.isExecutingRemotely() );
  assertFalse( transExecutionConfiguration.isExecutingLocally() );
}

// When the configured slave server cannot be resolved, execution must fail with a KettleException.
@Test
public void testExecuteRemoteNotFoundTrans() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( false );
  defaultRunConfiguration.setRemote( true );
  defaultRunConfiguration.setServer( "Test Server" );
  TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration();
  // Only a null lookup returns a server, so "Test Server" resolves to nothing.
  doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( null );
  try {
    defaultRunConfigurationExecutor
      .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace );
    fail();
  } catch ( KettleException e ) {
    // expected: unknown slave server
  }
}

// Local run configuration must leave the job executing locally.
@Test
public void testExecuteLocalJob() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( true );
  JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
  defaultRunConfigurationExecutor
    .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace );
  assertTrue( jobExecutionConfiguration.isExecutingLocally() );
}

// Remote run configuration resolves the slave server for job execution as well.
@Test
public void testExecuteRemoteJob() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( false );
  defaultRunConfiguration.setRemote( true );
  defaultRunConfiguration.setServer( "Test Server" );
  JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
  doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( "Test Server" );
  defaultRunConfigurationExecutor
    .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace );
  assertFalse( jobExecutionConfiguration.isExecutingLocally() );
  assertTrue( jobExecutionConfiguration.isExecutingRemotely() );
  assertEquals( jobExecutionConfiguration.getRemoteServer(), slaveServer );
}

// Unresolvable slave server fails job execution with a KettleException.
@Test
public void testExecuteRemoteNotFoundJob() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( false );
  defaultRunConfiguration.setRemote( true );
  defaultRunConfiguration.setServer( "Test Server" );
  JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
  doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( null );
  try {
    defaultRunConfigurationExecutor
      .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace );
    fail();
  } catch ( KettleException e ) {
    // expected: unknown slave server
  }
}

// Pentaho-engine configuration disables both local and remote flags on the job config.
@Test
public void testExecutePentahoJob() throws Exception {
  DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
  defaultRunConfiguration.setName( "Default Configuration" );
  defaultRunConfiguration.setLocal( false );
  defaultRunConfiguration.setPentaho( true );
  defaultRunConfiguration.setRemote( false );
  JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
  defaultRunConfigurationExecutor
    .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace );
  assertFalse( jobExecutionConfiguration.isExecutingLocally() );
  assertFalse( jobExecutionConfiguration.isExecutingRemotely() );
}
KettleDatabaseRepositoryCreationHelper {

  /**
   * Returns the string column length to use for repository VARCHAR columns: one less than
   * the database's maximum VARCHAR length when that maximum is known (positive), otherwise
   * the Oracle fallback length.
   *
   * @return repository string column length in characters
   */
  protected int getRepoStringLength() {
    // Hoisted so the lookup chain is evaluated only once instead of twice.
    int maxVarCharLength = database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1;
    return maxVarCharLength > 0 ? maxVarCharLength : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH;
  }

  KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository );
  synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun );
  List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create );
  List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create );
  void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create );
}
// An Oracle-backed repository derives its string length from Oracle's VARCHAR maximum.
@Test
public void testOracleDBRepoStringLength() throws Exception {
  KettleEnvironment.init();
  DatabaseMeta oracleMeta = new DatabaseMeta( "OraRepo", "ORACLE", "JDBC", null, "test", null, null, null );
  repositoryMeta =
    new KettleDatabaseRepositoryMeta( "KettleDatabaseRepository", "OraRepo", "Ora Repository", oracleMeta );
  repository = new KettleDatabaseRepository();
  repository.init( repositoryMeta );
  KettleDatabaseRepositoryCreationHelper creationHelper = new KettleDatabaseRepositoryCreationHelper( repository );
  assertEquals( EXPECTED_ORACLE_DB_REPO_STRING, creationHelper.getRepoStringLength() );
}

// A database interface without a usable VARCHAR maximum falls back to the default length.
@Test
public void testDefaultDBRepoStringLength() throws Exception {
  KettleEnvironment.init();
  DatabaseMeta testMeta = new DatabaseMeta();
  testMeta.setDatabaseInterface( new TestDatabaseMeta() );
  repositoryMeta =
    new KettleDatabaseRepositoryMeta( "KettleDatabaseRepository", "TestRepo", "Test Repository", testMeta );
  repository = new KettleDatabaseRepository();
  repository.init( repositoryMeta );
  KettleDatabaseRepositoryCreationHelper creationHelper = new KettleDatabaseRepositoryCreationHelper( repository );
  assertEquals( EXPECTED_DEFAULT_DB_REPO_STRING, creationHelper.getRepoStringLength() );
}
SparkRunConfigurationExecutor implements RunConfigurationExecutor {

  /**
   * Points the variable space at the AEL Spark engine described by the run configuration's
   * URL. For daemon protocol v2+ the websocket endpoint is written to the engine.* variables;
   * for v1 — and only when not running inside a Pentaho Server — the zookeeper host/port are
   * written to the OSGi configuration instead. Missing URL parts fall back to the DEFAULT_*
   * constants.
   *
   * @param runConfiguration the {@link SparkRunConfiguration} holding the engine URL
   * @param configuration execution configuration (not used here)
   * @param meta transformation/job metadata (not used here)
   * @param variableSpace variable space receiving the engine.* variables
   */
  @Override
  public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta,
                       VariableSpace variableSpace ) {
    // Install the JAAS capability when AEL security is present but JAAS is not yet installed.
    ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID );
    ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID );
    if ( securityCapability != null && securityCapability.isInstalled() ) {
      if ( jaasCapability != null && !jaasCapability.isInstalled() ) {
        jaasCapability.install();
      }
    }
    // Zookeeper support is installed on demand (needed by the v1 daemon).
    ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID );
    if ( capability != null && !capability.isInstalled() ) {
      capability.install();
    }
    SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration;
    String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" );
    URI uri = URI.create( runConfigURL.trim() );
    String protocol = uri.getScheme();
    String host = uri.getHost();
    // URI.getPort() returns -1 when no port is present; normalize that to null.
    String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() );
    String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" );
    boolean version2 = Const.toDouble( version, 1 ) >= 2;
    boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null;
    if ( version2 ) {
      // v2+ daemons are reached over a websocket; expose the endpoint through variables.
      variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) );
      variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) );
      variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) );
    }
    if ( !serverMode ) {
      try {
        Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY );
        Dictionary<String, Object> properties = zookeeperConfiguration.getProperties();
        if ( properties != null ) {
          if ( !version2 ) {
            // v1 daemons are discovered through zookeeper; clear the websocket variables.
            properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) );
            properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) );
            variableSpace.setVariable( "engine.protocol", null );
            variableSpace.setVariable( "engine.host", null );
            variableSpace.setVariable( "engine.port", null );
          } else {
            properties.remove( "zookeeper.host" );
            properties.remove( "zookeeper.port" );
          }
          zookeeperConfiguration.update( properties );
        }
      } catch ( IOException ioe ) {
        // Best-effort update: keep going, but report the cause instead of a bare generic message.
        System.out.println( "Error occurred accessing configuration: " + ioe.getMessage() );
      }
    }
    variableSpace.setVariable( "engine", "remote" );
    variableSpace.setVariable( "engine.remote", "spark" );
  }

  SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin );
  @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace );
  static String ZOOKEEPER_CAPABILITY_ID;
  static String PENTAHO_SERVER_CAPABILITY_ID;
  static String CONFIG_KEY;
  static String JAAS_CAPABILITY_ID;
  static String AEL_SECURITY_CAPABILITY_ID;
  static String DEFAULT_PROTOCOL;
  static String DEFAULT_HOST;
  static String DEFAULT_ZOOKEEPER_PORT;
  static String DEFAULT_WEBSOCKET_PORT;
}
@Test public void testExecuteRSADaemon() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( "ws: doReturn( "1.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine", "remote" ); verify( variableSpace ).setVariable( "engine.remote", "spark" ); verify( properties ).put( "zookeeper.host", "127.0.0.2" ); verify( properties ).put( "zookeeper.port", "8121" ); verify( variableSpace ).setVariable( "engine.host", null ); verify( variableSpace ).setVariable( "engine.port", null ); } @Test public void testExecuteNoPortRSADaemon() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( "zk: TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); doReturn( "1.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine", "remote" ); verify( variableSpace ).setVariable( "engine.remote", "spark" ); verify( properties ).put( "zookeeper.host", SparkRunConfigurationExecutor.DEFAULT_HOST ); verify( properties ).put( "zookeeper.port", SparkRunConfigurationExecutor.DEFAULT_ZOOKEEPER_PORT ); verify( variableSpace ).setVariable( "engine.host", null ); verify( variableSpace ).setVariable( "engine.port", null ); } @Test public void testWebSocketVersionExecute() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( 
"Spark Configuration" ); sparkRunConfiguration.setUrl( "http: doReturn( "2.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine", "remote" ); verify( variableSpace ).setVariable( "engine.remote", "spark" ); verify( properties ).remove( "zookeeper.host" ); verify( properties ).remove( "zookeeper.port" ); verify( variableSpace ).setVariable( "engine.host", "127.0.0.2" ); verify( variableSpace ).setVariable( "engine.port", "8121" ); } @Test public void testWebSocketVersionExecuteNoPort() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); doReturn( "2.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine", "remote" ); verify( variableSpace ).setVariable( "engine.remote", "spark" ); verify( properties ).remove( "zookeeper.host" ); verify( properties ).remove( "zookeeper.port" ); verify( variableSpace ).setVariable( "engine.protocol", SparkRunConfigurationExecutor.DEFAULT_PROTOCOL ); verify( variableSpace ).setVariable( "engine.host", SparkRunConfigurationExecutor.DEFAULT_HOST ); verify( variableSpace ).setVariable( "engine.port", SparkRunConfigurationExecutor.DEFAULT_WEBSOCKET_PORT ); } @Test public void testWssWebSocketVersionExecute() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( "wss: doReturn( "2.0" 
).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine.protocol", "wss" ); verify( variableSpace ).setVariable( "engine.host", "127.0.0.2" ); verify( variableSpace ).setVariable( "engine.port", "8121" ); } @Test public void testUrlWssWebSocketVersionExecute() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( " ws: doReturn( "2.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine.protocol", "ws" ); verify( variableSpace ).setVariable( "engine.host", "127.0.0.2" ); verify( variableSpace ).setVariable( "engine.port", "8121" ); } @Test public void testExecuteWithAelSecurityInstalled() { ICapability aelSecurityCapability = mock( ICapability.class ); setCapability( aelSecurityCapability, SparkRunConfigurationExecutor.AEL_SECURITY_CAPABILITY_ID, true ); ICapability jaasCapability = mock( ICapability.class ); setCapability( jaasCapability, SparkRunConfigurationExecutor.JAAS_CAPABILITY_ID, false ); SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( jaasCapability ).isInstalled(); verify( 
jaasCapability ).install(); } @Test public void testExecuteWithNoAelSecurityInstalled() { ICapability aelSecurityCapability = mock( ICapability.class ); setCapability( aelSecurityCapability, SparkRunConfigurationExecutor.AEL_SECURITY_CAPABILITY_ID, false ); ICapability jaasCapability = mock( ICapability.class ); setCapability( jaasCapability, SparkRunConfigurationExecutor.JAAS_CAPABILITY_ID, false ); SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( jaasCapability, never() ).isInstalled(); }
GPBulkLoader extends BaseStep implements StepInterface {

  /**
   * Builds the contents of the psql control file used to bulk-load the data file into
   * Greenplum: an optional TRUNCATE, followed by a \COPY of the configured table fields
   * from the (single-quoted) data file, with CSV error logging and a segment reject limit.
   *
   * @param meta step settings (table, schema, fields, data file, load action, max errors)
   * @param rm row metadata (currently unused)
   * @param r current row (currently unused)
   * @return the control file text
   * @throws KettleException when no stream fields are defined
   */
  public String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ) throws KettleException {
    DatabaseMeta dm = meta.getDatabaseMeta();
    // psql requires the data file name to be surrounded by single quotes.
    String inputName = "'" + environmentSubstitute( meta.getDataFile() ) + "'";
    String loadAction = meta.getLoadAction();
    // StringBuilder instead of StringBuffer: this buffer is method-local, so no synchronization is needed.
    StringBuilder contents = new StringBuilder( 500 );
    String tableName =
        dm.getQuotedSchemaTableCombination(
            environmentSubstitute( meta.getSchemaName() ), environmentSubstitute( meta.getTableName() ) );
    if ( loadAction.equalsIgnoreCase( "truncate" ) ) {
      contents.append( loadAction ).append( " " );
      contents.append( tableName ).append( ";" );
      contents.append( Const.CR );
    }
    contents.append( "\\COPY " );
    contents.append( tableName );
    contents.append( " ( " );
    String[] streamFields = meta.getFieldStream();
    String[] tableFields = meta.getFieldTable();
    if ( streamFields == null || streamFields.length == 0 ) {
      throw new KettleException( "No fields defined to load to database" );
    }
    for ( int i = 0; i < streamFields.length; i++ ) {
      if ( i != 0 ) {
        contents.append( ", " );
      }
      contents.append( dm.quoteField( tableFields[i] ) );
    }
    contents.append( " ) " );
    contents.append( " FROM " );
    contents.append( inputName );
    contents.append( " WITH CSV " );
    contents.append( "LOG ERRORS INTO " );
    contents.append( tableName ).append( "_errors " );
    contents.append( " SEGMENT REJECT LIMIT " );
    contents.append( meta.getMaxErrors() );
    return contents.toString();
  }

  GPBulkLoader( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans );
  String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r );
  void createControlFile( String filename, Object[] row, GPBulkLoaderMeta meta );
  String createCommandLine( GPBulkLoaderMeta meta, boolean password );
  boolean execute( GPBulkLoaderMeta meta, boolean wait );
  boolean processRow( StepMetaInterface smi, StepDataInterface sdi );
  boolean init( StepMetaInterface smi, StepDataInterface sdi );
  void dispose( StepMetaInterface smi, StepDataInterface sdi );
}
// The generated control file must quote the data file name with single quotes for psql.
@Test
public void testInputFileSurroundedBySingleQuotes() throws Exception {
  String datafile = "test-data-file";
  loader = new GPBulkLoader( mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta,
      mockHelper.trans );

  // Stub out quoting so the control file contains only what the loader itself adds.
  DatabaseMeta dbMetaMock = mock( DatabaseMeta.class );
  doReturn( "" ).when( dbMetaMock ).getQuotedSchemaTableCombination( anyString(), anyString() );
  doReturn( "" ).when( dbMetaMock ).quoteField( anyString() );

  GPBulkLoaderMeta meta = new GPBulkLoaderMeta();
  meta.setLoadAction( "" );
  meta.setFieldStream( new String[] { "" } );
  meta.setFieldTable( new String[] { "" } );
  meta.setDatabaseMeta( dbMetaMock );
  meta.setDataFile( datafile );

  String actual = loader.getControlFileContents( meta, null, null );
  int nameStart = actual.indexOf( datafile );
  if ( nameStart <= 0 ) {
    // Also covers index 0: there could be no quote character before the name.
    Assert.fail( "Datafile name not found in control file. Actual control file: " + actual );
  }
  boolean hasOpeningQuote = actual.charAt( nameStart - 1 ) == '\'';
  boolean hasClosingQuote = actual.charAt( nameStart + datafile.length() ) == '\'';
  if ( !hasOpeningQuote || !hasClosingQuote ) {
    Assert.fail( "Datafile name is not surrounded by single quotes. Actual control file: " + actual );
  }
}
AggregateRows extends BaseStep implements StepInterface {

  /**
   * Consumes incoming rows one at a time, folding the configured fields into running
   * aggregates; when the input stream ends, the single aggregate row is emitted and the
   * step signals completion.
   *
   * @param smi step meta (cast to AggregateRowsMeta)
   * @param sdi step data (cast to AggregateRowsData)
   * @return true while more rows are expected, false when the step is done
   * @throws KettleException on row-handling errors
   */
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (AggregateRowsMeta) smi;
    data = (AggregateRowsData) sdi;
    Object[] r = getRow(); // null signals end of the input stream
    if ( r == null ) {
      // End of input: emit the single aggregated result row and finish.
      Object[] agg = buildAggregate();
      putRow( data.outputRowMeta, agg );
      setOutputDone();
      return false;
    }
    if ( first ) {
      // One-time setup on the first row: build the output row meta and resolve
      // each configured field name to its index in the input row.
      first = false;
      data.outputRowMeta = getInputRowMeta().clone();
      meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
      for ( int i = 0; i < meta.getFieldName().length; i++ ) {
        data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i] );
        if ( data.fieldnrs[i] < 0 ) {
          // A configured field is missing from the input: abort the whole transformation.
          logError( BaseMessages.getString( PKG, "AggregateRows.Log.CouldNotFindField", meta.getFieldName()[i] ) );
          setErrors( 1 );
          stopAll();
          return false;
        }
        data.counts[i] = 0L;
      }
    }
    AddAggregate( getInputRowMeta(), r );
    // Periodic progress feedback based on the number of rows read.
    if ( checkFeedback( getLinesRead() ) ) {
      if ( log.isBasic() ) {
        logBasic( BaseMessages.getString( PKG, "AggregateRows.Log.LineNumber" ) + getLinesRead() );
      }
    }
    return true;
  }

  AggregateRows( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans );
  boolean processRow( StepMetaInterface smi, StepDataInterface sdi );
  boolean init( StepMetaInterface smi, StepDataInterface sdi );
}
// Drives AggregateRows through one data row and end-of-stream: the first call must
// accept the row (return true, no errors), the second must emit the aggregate and finish.
@Test
public void testProcessRow() throws KettleException {
  AggregateRows aggregateRows =
    new AggregateRows( stepMockHelper.stepMeta, stepMockHelper.stepDataInterface, 0, stepMockHelper.transMeta,
      stepMockHelper.trans );
  aggregateRows.init( stepMockHelper.initStepMetaInterface, stepMockHelper.initStepDataInterface );
  aggregateRows.setInputRowSets( new ArrayList<RowSet>( Arrays.asList( createSourceRowSet( "TEST" ) ) ) );

  // Size the data arrays to the number of configured aggregate fields.
  int fieldSize = stepMockHelper.initStepMetaInterface.getFieldName().length;
  AggregateRowsData data = new AggregateRowsData();
  data.fieldnrs = new int[ fieldSize ];
  data.counts = new long[ fieldSize ];
  data.values = new Object[ fieldSize ];

  // First call consumes the single source row.
  assertTrue( aggregateRows.processRow( stepMockHelper.initStepMetaInterface, data ) );
  assertTrue( aggregateRows.getErrors() == 0 );
  assertTrue( aggregateRows.getLinesRead() > 0 );

  // Second call sees end-of-stream and writes the aggregate row.
  RowMetaInterface outputRowMeta = mock( RowMetaInterface.class );
  when( outputRowMeta.size() ).thenReturn( fieldSize );
  data.outputRowMeta = outputRowMeta;
  assertFalse( aggregateRows.processRow( stepMockHelper.initStepMetaInterface, data ) );
  assertTrue( aggregateRows.getLinesWritten() > 0 );
}
UserRoleHelper implements java.io.Serializable {

  /**
   * Builds an {@code IUser} from a proxy user: description, password, login and name are
   * copied directly; for EE users the roles are additionally resolved through the web
   * service. Errors are printed to stderr; a construction failure yields null, a role
   * lookup failure yields a user without roles.
   */
  public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
      UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) {
    IUser converted = null;
    try {
      converted = rsm.constructUser();
      converted.setDescription( user.getDescription() );
      converted.setPassword( user.getPassword() );
      converted.setLogin( user.getName() );
      converted.setName( user.getName() );
      try {
        // Only EE users carry role assignments.
        if ( converted instanceof IEEUser ) {
          ( (IEEUser) converted ).setRoles(
              convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) );
        }
      } catch ( UserRoleException roleError ) {
        roleError.printStackTrace();
      }
    } catch ( KettleException constructionError ) {
      constructionError.printStackTrace();
    }
    return converted;
  }

  static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm );
  static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm );
  static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm );
  static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm );
  static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm );
  static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name );
  static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users, IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm );
  static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles, IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm );
  static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users );
  static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users );
  static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo );
  static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles );
  static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles );
  static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo );
  static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm );
  static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm );
  static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles, IRoleSupportSecurityManager rsm );
  static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments, IRoleSupportSecurityManager rsm );
  static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm );
  static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments, IRoleSupportSecurityManager rsm );
  static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm );
  static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments, IRoleSupportSecurityManager rsm );
  static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments, IRoleSupportSecurityManager rsm );
}
@Test public void convertFromProxyPentahoUser_RetunsNull_WhenErrorOccurs() throws Exception { IRoleSupportSecurityManager manager = mock( IRoleSupportSecurityManager.class ); when( manager.constructUser() ).thenThrow( new KettleException() ); IUser user = convertFromProxyPentahoUser( new ProxyPentahoUser(), Collections.<UserToRoleAssignment> emptyList(), manager ); assertNull( user ); } @Test public void convertFromProxyPentahoUser_CopiesDataFromInput() throws Exception { IRoleSupportSecurityManager manager = mockSecurityManager( false ); ProxyPentahoUser pentahoUser = pentahoUser( "name" ); pentahoUser.setPassword( "password" ); pentahoUser.setDescription( "desc" ); pentahoUser.setEnabled( true ); IUser user = convertFromProxyPentahoUser( pentahoUser, Collections.<UserToRoleAssignment> emptyList(), manager ); assertNotNull( user ); assertEquals( pentahoUser.getName(), user.getName() ); assertEquals( pentahoUser.getName(), user.getLogin() ); assertEquals( pentahoUser.getPassword(), user.getPassword() ); assertEquals( pentahoUser.getDescription(), user.getDescription() ); assertEquals( pentahoUser.getEnabled(), user.isEnabled() ); } @Test public void convertFromProxyPentahoUser_CopiesRolesForEeUser() throws Exception { IRoleSupportSecurityManager manager = mockSecurityManager( true ); ProxyPentahoUser pentahoUser = pentahoUser( "name" ); List<UserToRoleAssignment> assignments = Collections.singletonList( new UserToRoleAssignment( "name", "role" ) ); EEUserInfo user = (EEUserInfo) convertFromProxyPentahoUser( pentahoUser, assignments, manager ); assertNotNull( user ); assertEquals( pentahoUser.getName(), user.getName() ); assertEquals( 1, user.getRoles().size() ); assertEquals( "role", user.getRoles().iterator().next().getName() ); }
UserRoleHelper implements java.io.Serializable { public static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ) { List<ProxyPentahoUser> users = info.getUsers(); if ( users == null || users.isEmpty() ) { return Collections.emptyList(); } List<UserToRoleAssignment> assignments = info.getAssignments(); List<IUser> userList = new ArrayList<IUser>( users.size() ); for ( ProxyPentahoUser user : users ) { userList.add( convertFromProxyPentahoUser( user, assignments, rsm ) ); } return userList; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users, IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles, IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole 
convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles, IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments, IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments, IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments, IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments, IRoleSupportSecurityManager rsm ); }
@Test public void convertFromProxyPentahoUsers_ReturnsEmptyList_WhenUsersAreAbsent() throws Exception { UserRoleSecurityInfo info = new UserRoleSecurityInfo(); info.setUsers( null ); IRoleSupportSecurityManager manager = mockSecurityManager( false ); List<IUser> users = convertFromProxyPentahoUsers( info, manager ); assertNotNull( users ); assertTrue( users.isEmpty() ); } @Test public void convertFromProxyPentahoUsers_CopiesEachUser() throws Exception { UserRoleSecurityInfo info = new UserRoleSecurityInfo(); info.setUsers( Arrays.asList( pentahoUser( "user1" ), pentahoUser( "user2" ) ) ); IRoleSupportSecurityManager manager = mockSecurityManager( false ); List<IUser> users = convertFromProxyPentahoUsers( info, manager ); assertNotNull( users ); assertEquals( 2, users.size() ); assertEquals( "user1", users.get( 0 ).getName() ); assertEquals( "user2", users.get( 1 ).getName() ); }
AbsSecurityProvider extends PurRepositorySecurityProvider implements IAbsSecurityProvider, java.io.Serializable { @Override public void validateAction( RepositoryOperation... operations ) throws KettleException { for ( RepositoryOperation operation : operations ) { switch ( operation ) { case EXECUTE_TRANSFORMATION: case EXECUTE_JOB: checkOperationAllowed( EXECUTE_CONTENT_ACTION ); break; case MODIFY_TRANSFORMATION: case MODIFY_JOB: checkOperationAllowed( CREATE_CONTENT_ACTION ); break; case SCHEDULE_TRANSFORMATION: case SCHEDULE_JOB: checkOperationAllowed( SCHEDULE_CONTENT_ACTION ); break; } } } AbsSecurityProvider( PurRepository repository, PurRepositoryMeta repositoryMeta, IUser userInfo, ServiceManager serviceManager ); List<String> getAllowedActions( String nameSpace ); boolean isAllowed( String actionName ); @Override void validateAction( RepositoryOperation... operations ); }
@Test( expected = KettleException.class ) public void exceptionThrown_WhenOperationNotAllowed_ExecuteOperation() throws Exception { setOperationPermissions( IAbsSecurityProvider.EXECUTE_CONTENT_ACTION, false ); provider.validateAction( RepositoryOperation.EXECUTE_TRANSFORMATION ); } @Test( expected = KettleException.class ) public void exceptionThrown_WhenOperationNotAllowed_ScheduleOperation() throws Exception { setOperationPermissions( IAbsSecurityProvider.SCHEDULE_CONTENT_ACTION, false ); provider.validateAction( RepositoryOperation.SCHEDULE_JOB ); } @Test( expected = KettleException.class ) public void exceptionThrown_WhenOperationNotAllowed_CreateOperation() throws Exception { setOperationPermissions( IAbsSecurityProvider.CREATE_CONTENT_ACTION, false ); provider.validateAction( RepositoryOperation.MODIFY_JOB ); } @Test public void noExceptionThrown_WhenOperationIsAllowed_ScheduleOperation() throws Exception { setOperationPermissions( IAbsSecurityProvider.EXECUTE_CONTENT_ACTION, true ); provider.validateAction( RepositoryOperation.EXECUTE_JOB ); } @Test public void noExceptionThrown_WhenOperationIsAllowed_CreateOperation() throws Exception { setOperationPermissions( IAbsSecurityProvider.SCHEDULE_CONTENT_ACTION, true ); provider.validateAction( RepositoryOperation.SCHEDULE_TRANSFORMATION ); } @Test public void noExceptionThrown_WhenOperationIsAllowed_ExecuteOperation() throws Exception { setOperationPermissions( IAbsSecurityProvider.CREATE_CONTENT_ACTION, true ); provider.validateAction( RepositoryOperation.MODIFY_TRANSFORMATION ); }
KettleDatabaseRepositorySecurityProvider extends BaseRepositorySecurityProvider implements RepositorySecurityProvider, RepositorySecurityManager, RepositorySecurityUserValidator { public void saveUserInfo( IUser userInfo ) throws KettleException { normalizeUserInfo( userInfo ); if ( !validateUserInfo( userInfo ) ) { throw new KettleException( BaseMessages.getString( KettleDatabaseRepositorySecurityProvider.class, "KettleDatabaseRepositorySecurityProvider.ERROR_0001_UNABLE_TO_CREATE_USER" ) ); } if ( userInfo.getObjectId() != null ) { throw new IllegalArgumentException( "Use updateUser() for updating" ); } String userLogin = userInfo.getLogin(); ObjectId exactMatch = userDelegate.getUserID( userLogin ); if ( exactMatch != null ) { throw new KettleException( BaseMessages.getString( KettleDatabaseRepositorySecurityProvider.class, "KettleDatabaseRepositorySecurityProvider.ERROR_0001_USER_NAME_ALREADY_EXISTS" ) ); } userDelegate.saveUserInfo( userInfo ); } KettleDatabaseRepositorySecurityProvider( KettleDatabaseRepository repository, RepositoryMeta repositoryMeta, IUser userInfo ); boolean isReadOnly(); boolean isLockingPossible(); boolean allowsVersionComments( String fullPath ); boolean isVersionCommentMandatory(); IUser loadUserInfo( String login ); void saveUserInfo( IUser userInfo ); void validateAction( RepositoryOperation... 
operations ); synchronized void delUser( ObjectId id_user ); synchronized ObjectId getUserID( String login ); ObjectId[] getUserIDs(); synchronized String[] getUserLogins(); synchronized void renameUser( ObjectId id_user, String newname ); void deleteUsers( List<IUser> users ); List<IUser> getUsers(); void setUsers( List<IUser> users ); void delUser( String name ); void updateUser( IUser user ); IUser constructUser(); List<String> getAllRoles(); List<String> getAllUsers(); boolean isManaged(); @Override boolean isVersioningEnabled( String fullPath ); @Override boolean validateUserInfo( IUser user ); @Override void normalizeUserInfo( IUser user ); }
@Test( expected = KettleException.class ) public void saveUserInfo_NormalizesInfo_FailsIfStillBreaches() throws Exception { provider.saveUserInfo( new UserInfo( " " ) ); }
UserRoleDelegate implements java.io.Serializable { public void createUser( IUser newUser ) throws KettleException { ensureHasPermissions(); ProxyPentahoUser user = UserRoleHelper.convertToPentahoProxyUser( newUser ); try { ProxyPentahoUser[] existingUsers = userRoleWebService.getUsers(); if ( existsAmong( existingUsers, user ) ) { throw userExistsException(); } } catch ( UserRoleException e ) { throw cannotCreateUserException( newUser, e ); } try { userRoleWebService.createUser( user ); if ( newUser instanceof IEEUser ) { userRoleWebService .setRoles( user, UserRoleHelper.convertToPentahoProxyRoles( ( (IEEUser) newUser ).getRoles() ) ); } lookupCache.insertUserToLookupSet( newUser ); fireUserRoleListChange(); } catch ( Exception e ) { if ( e.getCause().toString().contains( "org.pentaho.platform.api.engine.security.userroledao.AlreadyExistsException" ) ) { throw userExistsException(); } throw cannotCreateUserException( newUser, e ); } } UserRoleDelegate( IRoleSupportSecurityManager rsm, PurRepositoryMeta repositoryMeta, IUser userInfo, Log logger, ServiceManager serviceManager ); UserRoleDelegate( Log logger, IUserRoleListWebService userDetailsRoleListWebService, IUserRoleWebService userRoleWebService ); void updateUserRoleInfo(); boolean isManaged(); void createUser( IUser newUser ); void deleteUsers( List<IUser> users ); void deleteUser( String name ); void setUsers( List<IUser> users ); IUser getUser( String name, String password ); IUser getUser( String name ); List<IUser> getUsers(); void updateUser( IUser user ); void createRole( IRole newRole ); void deleteRoles( List<IRole> roles ); IRole getRole( String name ); List<IRole> getRoles(); List<IRole> getDefaultRoles(); void updateRole( IRole role ); void deleteRole( String name ); void setRoles( List<IRole> roles ); void addUserRoleListChangeListener( IUserRoleListChangeListener listener ); void removeUserRoleListChangeListener( IUserRoleListChangeListener listener ); }
@Test public void createUser_CreatesSuccessfully_WhenNameIsUnique() throws Exception { final String name = "user"; delegate.createUser( new UserInfo( name ) ); verify( roleWebService ).createUser( any( ProxyPentahoUser.class ) ); }
UserRoleDelegate implements java.io.Serializable { public void createRole( IRole newRole ) throws KettleException { ensureHasPermissions(); ProxyPentahoRole role = UserRoleHelper.convertToPentahoProxyRole( newRole ); try { ProxyPentahoRole[] existingRoles = userRoleWebService.getRoles(); if ( existsAmong( existingRoles, role ) ) { throw roleExistsException(); } } catch ( UserRoleException e ) { throw cannotCreateRoleException( newRole, e ); } try { userRoleWebService.createRole( role ); userRoleWebService.setUsers( role, UserRoleHelper.convertToPentahoProxyUsers( newRole.getUsers() ) ); lookupCache.insertRoleToLookupSet( newRole ); fireUserRoleListChange(); } catch ( UserRoleException e ) { throw cannotCreateRoleException( newRole, e ); } catch ( Exception e ) { if ( e.getCause().toString().contains( "org.pentaho.platform.api.engine.security.userroledao.AlreadyExistsException" ) ) { throw roleExistsException(); } } } UserRoleDelegate( IRoleSupportSecurityManager rsm, PurRepositoryMeta repositoryMeta, IUser userInfo, Log logger, ServiceManager serviceManager ); UserRoleDelegate( Log logger, IUserRoleListWebService userDetailsRoleListWebService, IUserRoleWebService userRoleWebService ); void updateUserRoleInfo(); boolean isManaged(); void createUser( IUser newUser ); void deleteUsers( List<IUser> users ); void deleteUser( String name ); void setUsers( List<IUser> users ); IUser getUser( String name, String password ); IUser getUser( String name ); List<IUser> getUsers(); void updateUser( IUser user ); void createRole( IRole newRole ); void deleteRoles( List<IRole> roles ); IRole getRole( String name ); List<IRole> getRoles(); List<IRole> getDefaultRoles(); void updateRole( IRole role ); void deleteRole( String name ); void setRoles( List<IRole> roles ); void addUserRoleListChangeListener( IUserRoleListChangeListener listener ); void removeUserRoleListChangeListener( IUserRoleListChangeListener listener ); }
@Test public void createRole_CreatesSuccessfully_WhenNameIsUnique() throws Exception { final String name = "role"; delegate.createRole( new EERoleInfo( name ) ); verify( roleWebService ).createRole( any( ProxyPentahoRole.class ) ); }
ActiveCache { public Value get( Key key ) throws Exception { ActiveCacheResult<Value> result = null; Future<ActiveCacheResult<Value>> futureResult = null; synchronized ( this ) { result = valueMap.get( key ); boolean shouldReload = false; long time = System.currentTimeMillis(); if ( result == null || result.getTimeLoaded() + timeout < time ) { result = null; shouldReload = true; } else if ( result.getTimeLoaded() + ( timeout / 2.0 ) < time ) { shouldReload = true; } if ( shouldReload ) { futureResult = loadingMap.get( key ); if ( futureResult == null ) { futureResult = executorServiceGetter.getExecutor().submit( new ActiveCacheCallable<Key, Value>( this, valueMap, loadingMap, key, loader ) ); loadingMap.put( key, futureResult ); } } } if ( result == null ) { result = futureResult.get(); } Exception exception = result.getException(); if ( exception != null ) { throw exception; } return result.getValue(); } ActiveCache( ActiveCacheLoader<Key, Value> loader, long timeout ); ActiveCache( ActiveCacheLoader<Key, Value> loader, long timeout, ExecutorServiceGetter executorServiceGetter ); ActiveCache( ActiveCacheLoader<Key, Value> loader, Map<Key, ActiveCacheResult<Value>> valueMap, Map<Key, Future<ActiveCacheResult<Value>>> loadingMap, long timeout, ExecutorServiceGetter executorServiceGetter ); Value get( Key key ); }
@Test public void testActiveCacheLoadsWhenNull() throws Exception { long timeout = 100; @SuppressWarnings( "unchecked" ) ActiveCacheLoader<String, String> mockLoader = mock( ActiveCacheLoader.class ); ActiveCache<String, String> cache = new ActiveCache<String, String>( mockLoader, timeout ); String testKey = "TEST-KEY"; String testResult = "TEST-RESULT"; when( mockLoader.load( testKey ) ).thenReturn( testResult ); assertEquals( testResult, cache.get( testKey ) ); verify( mockLoader, times( 1 ) ).load( testKey ); } @Test public void testActiveCacheLoadsWhenTimedOut() throws Exception { long timeout = 100; @SuppressWarnings( "unchecked" ) ActiveCacheLoader<String, String> mockLoader = mock( ActiveCacheLoader.class ); ActiveCache<String, String> cache = new ActiveCache<String, String>( mockLoader, timeout ); String testKey = "TEST-KEY"; String testResult = "TEST-RESULT"; String testResult2 = "TEST-RESULT-2"; when( mockLoader.load( testKey ) ).thenReturn( testResult ).thenReturn( testResult2 ); assertEquals( testResult, cache.get( testKey ) ); Thread.sleep( timeout + 10 ); assertEquals( testResult2, cache.get( testKey ) ); verify( mockLoader, times( 2 ) ).load( testKey ); } @SuppressWarnings( { "unchecked", "rawtypes" } ) @Test public void testActiveCachePreemtivelyReloadsWhenHalfwayToTimeout() throws Exception { long timeout = 500; ActiveCacheLoader<String, String> mockLoader = mock( ActiveCacheLoader.class ); final ExecutorService mockService = mock( ExecutorService.class ); final FutureHolder lastSubmittedFuture = new FutureHolder(); when( mockService.submit( any( Callable.class ) ) ).thenAnswer( new Answer<Future>() { @Override public Future answer( InvocationOnMock invocation ) throws Throwable { lastSubmittedFuture.future = ExecutorUtil.getExecutor().submit( (Callable) invocation.getArguments()[0] ); return lastSubmittedFuture.future; } } ); ActiveCache<String, String> cache = new ActiveCache<String, String>( mockLoader, timeout, new ExecutorServiceGetter() { 
@Override public ExecutorService getExecutor() { return mockService; } } ); String testKey = "TEST-KEY"; String testResult = "TEST-RESULT"; String testResult2 = "TEST-RESULT-2"; when( mockLoader.load( testKey ) ).thenReturn( testResult ).thenReturn( testResult2 ); assertEquals( testResult, cache.get( testKey ) ); Thread.sleep( 255 ); assertEquals( testResult, cache.get( testKey ) ); lastSubmittedFuture.future.get(); assertEquals( testResult2, cache.get( testKey ) ); verify( mockLoader, times( 2 ) ).load( testKey ); } @Test public void testActiveCacheDoesntCacheExceptions() throws Exception { long timeout = 100; @SuppressWarnings( "unchecked" ) ActiveCacheLoader<String, String> mockLoader = mock( ActiveCacheLoader.class ); ActiveCache<String, String> cache = new ActiveCache<String, String>( mockLoader, timeout ); String testKey = "TEST-KEY"; Exception testResult = new Exception( "TEST-RESULT" ); String testResult2 = "TEST-RESULT-2"; when( mockLoader.load( testKey ) ).thenThrow( testResult ).thenReturn( testResult2 ); try { cache.get( testKey ); fail(); } catch ( Exception e ) { assertEquals( testResult, e ); } assertEquals( testResult2, cache.get( testKey ) ); verify( mockLoader, times( 2 ) ).load( testKey ); }
PurRepositoryConnector implements IRepositoryConnector { @Override public synchronized void disconnect() { if ( serviceManager != null ) { serviceManager.close(); } serviceManager = null; } PurRepositoryConnector( PurRepository purRepository, PurRepositoryMeta repositoryMeta, RootRef rootRef ); synchronized RepositoryConnectResult connect( final String username, final String password ); @Override synchronized void disconnect(); LogChannelInterface getLog(); @Override ServiceManager getServiceManager(); static boolean inProcess(); }
@Test public void testPDI12439PurRepositoryConnectorDoesntNPEAfterMultipleDisconnects() { PurRepository mockPurRepository = mock( PurRepository.class ); PurRepositoryMeta mockPurRepositoryMeta = mock( PurRepositoryMeta.class ); RootRef mockRootRef = mock( RootRef.class ); PurRepositoryConnector purRepositoryConnector = new PurRepositoryConnector( mockPurRepository, mockPurRepositoryMeta, mockRootRef ); purRepositoryConnector.disconnect(); purRepositoryConnector.disconnect(); }
DatabaseDelegate extends AbstractDelegate implements ITransformer, SharedObjectAssembler<DatabaseMeta>, java.io.Serializable { public DataNode elementToDataNode( final RepositoryElementInterface element ) throws KettleException { DatabaseMeta databaseMeta = (DatabaseMeta) element; DataNode rootNode = new DataNode( NODE_ROOT ); rootNode.setProperty( PROP_TYPE, databaseMeta.getPluginId() ); rootNode.setProperty( PROP_CONTYPE, DatabaseMeta.getAccessTypeDesc( databaseMeta.getAccessType() ) ); rootNode.setProperty( PROP_HOST_NAME, databaseMeta.getHostname() ); rootNode.setProperty( PROP_DATABASE_NAME, databaseMeta.getDatabaseName() ); rootNode.setProperty( PROP_PORT, new Long( Const.toInt( databaseMeta.getDatabasePortNumberString(), -1 ) ) ); rootNode.setProperty( PROP_USERNAME, databaseMeta.getUsername() ); rootNode.setProperty( PROP_PASSWORD, Encr.encryptPasswordIfNotUsingVariables( databaseMeta.getPassword() ) ); rootNode.setProperty( PROP_SERVERNAME, databaseMeta.getServername() ); rootNode.setProperty( PROP_DATA_TBS, databaseMeta.getDataTablespace() ); rootNode.setProperty( PROP_INDEX_TBS, databaseMeta.getIndexTablespace() ); DataNode attrNode = rootNode.addNode( NODE_ATTRIBUTES ); Properties attributes = databaseMeta.getAttributes(); Enumeration<Object> keys = databaseMeta.getAttributes().keys(); while ( keys.hasMoreElements() ) { String code = (String) keys.nextElement(); String attribute = (String) attributes.get( code ); String escapedCode = RepositoryFilenameUtils.escape( code, repo.getPur().getReservedChars() ); attrNode.setProperty( escapedCode, attribute ); } return rootNode; } DatabaseDelegate( final PurRepository repo ); DataNode elementToDataNode( final RepositoryElementInterface element ); RepositoryElementInterface dataNodeToElement( final DataNode rootNode ); void dataNodeToElement( final DataNode rootNode, final RepositoryElementInterface element ); Repository getRepository(); DatabaseMeta assemble( RepositoryFile file, NodeRepositoryFileData data, 
VersionSummary version ); }
@Test public void testExtraOptionEscapeWithInvalidCharInDatabaseType() throws KettleException { DatabaseMeta dbMeta = mock( DatabaseMeta.class ); when( dbMeta.getPluginId() ).thenReturn( "pluginId" ); when( dbMeta.getAccessTypeDesc() ).thenReturn( "Native" ); when( dbMeta.getHostname() ).thenReturn( "AS/400Host" ); when( dbMeta.getDatabaseName() ).thenReturn( "mainframeTable" ); when( dbMeta.getDatabasePortNumberString() ).thenReturn( "1234" ); when( dbMeta.getUsername() ).thenReturn( "testUser" ); when( dbMeta.getPassword() ).thenReturn( "123" ); when( dbMeta.getServername() ).thenReturn( "as400.dot.com" ); when( dbMeta.getDataTablespace() ).thenReturn( "tableSpace" ); when( dbMeta.getIndexTablespace() ).thenReturn( "123" ); Properties extraOptions = new Properties(); extraOptions.setProperty( "EXTRA_OPTION_AS/400.optionExtraOption", "true" ); when( dbMeta.getAttributes() ).thenReturn( extraOptions ); IUnifiedRepository purRepo = mock( IUnifiedRepository.class ); when( purRepo.getReservedChars() ).thenReturn( Arrays.asList( new Character[] { '/' } ) ); when( mockPurRepository.getPur() ).thenReturn( purRepo ); DataNode escapedAttributes = dbDelegate.elementToDataNode( dbMeta ); for ( Iterator<DataNode> iter = escapedAttributes.getNodes().iterator(); iter.hasNext(); ) { DataNode options = iter.next(); assertTrue( "Invalid escaped extra options", options.hasProperty( "EXTRA_OPTION_AS%2F400.optionExtraOption" ) ); assertFalse( "Should not contain un-escaped option", options .hasProperty( "EXTRA_OPTION_AS/400.optionExtraOption" ) ); } }
KettleDatabaseRepositoryMeta extends BaseRepositoryMeta implements RepositoryMeta { @Override public void populate( Map<String, Object> properties, RepositoriesMeta repositoriesMeta ) { super.populate( properties, repositoriesMeta ); String databaseConnection = (String) properties.get( DATABASE_CONNECTION ); DatabaseMeta databaseMeta = repositoriesMeta.searchDatabase( databaseConnection ); if ( databaseMeta != null ) { setConnection( databaseMeta ); } } KettleDatabaseRepositoryMeta(); KettleDatabaseRepositoryMeta( String id, String name, String description, DatabaseMeta connection ); KettleDatabaseRepositoryMeta( String id ); RepositoryCapabilities getRepositoryCapabilities(); void setName( String name ); String getName(); void setDescription( String description ); String getDescription(); Boolean isDefault(); void setDefault( Boolean isDefault ); void setConnection( DatabaseMeta connection ); DatabaseMeta getConnection(); String getXML(); void loadXML( Node repnode, List<DatabaseMeta> databases ); RepositoryMeta clone(); @Override void populate( Map<String, Object> properties, RepositoriesMeta repositoriesMeta ); @SuppressWarnings( "unchecked" ) @Override JSONObject toJSONObject(); static final String ID; static final String DATABASE_CONNECTION; static String REPOSITORY_TYPE_ID; }
@Test public void testPopulate() throws Exception { kettleDatabaseRepositoryMeta.setConnection( databaseMeta ); when( databaseMeta.getName() ).thenReturn( DATABASE_CONNECTION ); when( repositoriesMeta.searchDatabase( DATABASE_CONNECTION ) ).thenReturn( databaseMeta ); Map<String, Object> properties = new HashMap<>(); properties.put( "displayName", NAME ); properties.put( "description", DESCRIPTION ); properties.put( "databaseConnection", DATABASE_CONNECTION ); properties.put( "isDefault", true ); kettleDatabaseRepositoryMeta.populate( properties, repositoriesMeta ); assertEquals( NAME, kettleDatabaseRepositoryMeta.getName() ); assertEquals( DESCRIPTION, kettleDatabaseRepositoryMeta.getDescription() ); assertEquals( DATABASE_CONNECTION, kettleDatabaseRepositoryMeta.getConnection().getName() ); assertEquals( true, kettleDatabaseRepositoryMeta.isDefault() ); }
DatabaseDelegate extends AbstractDelegate implements ITransformer, SharedObjectAssembler<DatabaseMeta>, java.io.Serializable { public RepositoryElementInterface dataNodeToElement( final DataNode rootNode ) throws KettleException { DatabaseMeta databaseMeta = new DatabaseMeta(); dataNodeToElement( rootNode, databaseMeta ); return databaseMeta; } DatabaseDelegate( final PurRepository repo ); DataNode elementToDataNode( final RepositoryElementInterface element ); RepositoryElementInterface dataNodeToElement( final DataNode rootNode ); void dataNodeToElement( final DataNode rootNode, final RepositoryElementInterface element ); Repository getRepository(); DatabaseMeta assemble( RepositoryFile file, NodeRepositoryFileData data, VersionSummary version ); }
@Test public void testExtraOptionUnescapeWithInvalidCharInDatabaseType() throws KettleException { DataNode mockDataNode = mock( DataNode.class ); DataNode unescapedExtraOptions = new DataNode( "options" ); unescapedExtraOptions.setProperty( "EXTRA_OPTION_AS%2F400.optionExtraOption", true ); when( mockDataNode.getNode( "attributes" ) ).thenReturn( unescapedExtraOptions ); DatabaseMeta unescapedDbMeta = mock( DatabaseMeta.class ); when( unescapedDbMeta.getAttributes() ).thenReturn( new Properties() ); dbDelegate.dataNodeToElement( mockDataNode, unescapedDbMeta ); assertEquals( "true", unescapedDbMeta.getAttributes().getProperty( "EXTRA_OPTION_AS/400.optionExtraOption" ) ); }
PurRepositoryMeta extends BaseRepositoryMeta implements RepositoryMeta, java.io.Serializable { @Override public void populate( Map<String, Object> properties, RepositoriesMeta repositoriesMeta ) { super.populate( properties, repositoriesMeta ); String url = (String) properties.get( URL ); PurRepositoryLocation purRepositoryLocation = new PurRepositoryLocation( url ); setRepositoryLocation( purRepositoryLocation ); } PurRepositoryMeta(); PurRepositoryMeta( String id, String name, String description, PurRepositoryLocation repositoryLocation, boolean versionCommentMandatory ); String getXML(); void loadXML( Node repnode, List<DatabaseMeta> databases ); RepositoryCapabilities getRepositoryCapabilities(); PurRepositoryLocation getRepositoryLocation(); void setRepositoryLocation( PurRepositoryLocation repositoryLocation ); boolean isVersionCommentMandatory(); void setVersionCommentMandatory( boolean versionCommentMandatory ); RepositoryMeta clone(); @Override void populate( Map<String, Object> properties, RepositoriesMeta repositoriesMeta ); @SuppressWarnings( "unchecked" ) @Override JSONObject toJSONObject(); static final String URL; static String REPOSITORY_TYPE_ID; }
@Test public void testPopulate() throws Exception { Map<String, Object> properties = new HashMap<String, Object>(); properties.put( "displayName", "Display Name" ); properties.put( "url", "URL" ); properties.put( "description", "Description" ); properties.put( "isDefault", true ); PurRepositoryMeta purRepositoryMeta = new PurRepositoryMeta(); purRepositoryMeta.populate( properties, repositoriesMeta ); assertEquals( "Display Name", purRepositoryMeta.getName() ); assertEquals( "URL", purRepositoryMeta.getRepositoryLocation().getUrl() ); assertEquals( "Description", purRepositoryMeta.getDescription() ); assertEquals( true, purRepositoryMeta.isDefault() ); }
PurRepositorySecurityManager implements IRoleSupportSecurityManager, IUserRoleListChangeListener, java.io.Serializable, RepositorySecurityUserValidator, RepositorySecurityRoleValidator { public void createRole( IRole newRole ) throws KettleException { normalizeRoleInfo( newRole ); if ( !validateRoleInfo( newRole ) ) { throw new KettleException( BaseMessages.getString( PurRepositorySecurityManager.class, "PurRepositorySecurityManager.ERROR_0001_INVALID_NAME" ) ); } userRoleDelegate.createRole( newRole ); } PurRepositorySecurityManager( PurRepository repository, PurRepositoryMeta repositoryMeta, IUser user, ServiceManager serviceManager ); UserRoleDelegate getUserRoleDelegate(); void setUserRoleDelegate( UserRoleDelegate userRoleDelegate ); PurRepository getRepository(); boolean supportsMetadata(); boolean supportsRevisions(); boolean supportsUsers(); void delUser( ObjectId id_user ); ObjectId getUserID( String login ); ObjectId[] getUserIDs(); IUser loadUserInfo( String login ); IUser loadUserInfo( String login, String password ); void renameUser( ObjectId id_user, String newname ); void saveUserInfo( IUser user ); @Override boolean validateUserInfo( IUser user ); @Override void normalizeUserInfo( IUser user ); void createRole( IRole newRole ); @Override boolean validateRoleInfo( IRole role ); @Override void normalizeRoleInfo( IRole role ); void deleteRoles( List<IRole> roles ); void deleteUsers( List<IUser> users ); IRole getRole( String name ); List<IRole> getRoles(); List<IRole> getDefaultRoles(); void updateRole( IRole role ); void updateUser( IUser user ); void delUser( String name ); void deleteRole( String name ); List<IUser> getUsers(); void setRoles( List<IRole> roles ); void setUsers( List<IUser> users ); IRole constructRole(); IUser constructUser(); void onChange(); static Log getLogger(); boolean isManaged(); }
@Test public void createRole_NormalizesInfo_PassesIfNoViolations() throws Exception { IRole info = new EERoleInfo( "role ", "" ); ArgumentCaptor<IRole> captor = ArgumentCaptor.forClass( IRole.class ); manager.createRole( info ); verify( roleDelegate ).createRole( captor.capture() ); info = captor.getValue(); assertEquals( "Spaces should be trimmed", "role", info.getName() ); } @Test( expected = KettleException.class ) public void createRole_NormalizesInfo_FailsIfStillBreaches() throws Exception { IRole info = new EERoleInfo( " ", "" ); manager.createRole( info ); }
PurRepositorySecurityManager implements IRoleSupportSecurityManager, IUserRoleListChangeListener, java.io.Serializable, RepositorySecurityUserValidator, RepositorySecurityRoleValidator { public void saveUserInfo( IUser user ) throws KettleException { normalizeUserInfo( user ); if ( !validateUserInfo( user ) ) { throw new KettleException( BaseMessages.getString( PurRepositorySecurityManager.class, "PurRepositorySecurityManager.ERROR_0001_INVALID_NAME" ) ); } userRoleDelegate.createUser( user ); } PurRepositorySecurityManager( PurRepository repository, PurRepositoryMeta repositoryMeta, IUser user, ServiceManager serviceManager ); UserRoleDelegate getUserRoleDelegate(); void setUserRoleDelegate( UserRoleDelegate userRoleDelegate ); PurRepository getRepository(); boolean supportsMetadata(); boolean supportsRevisions(); boolean supportsUsers(); void delUser( ObjectId id_user ); ObjectId getUserID( String login ); ObjectId[] getUserIDs(); IUser loadUserInfo( String login ); IUser loadUserInfo( String login, String password ); void renameUser( ObjectId id_user, String newname ); void saveUserInfo( IUser user ); @Override boolean validateUserInfo( IUser user ); @Override void normalizeUserInfo( IUser user ); void createRole( IRole newRole ); @Override boolean validateRoleInfo( IRole role ); @Override void normalizeRoleInfo( IRole role ); void deleteRoles( List<IRole> roles ); void deleteUsers( List<IUser> users ); IRole getRole( String name ); List<IRole> getRoles(); List<IRole> getDefaultRoles(); void updateRole( IRole role ); void updateUser( IUser user ); void delUser( String name ); void deleteRole( String name ); List<IUser> getUsers(); void setRoles( List<IRole> roles ); void setUsers( List<IUser> users ); IRole constructRole(); IUser constructUser(); void onChange(); static Log getLogger(); boolean isManaged(); }
@Test
public void saveUserInfo_NormalizesInfo_PassesIfNoViolations() throws Exception {
  // A login with trailing whitespace must be normalized before delegation.
  IUser rawUser = new UserInfo( "login " );
  manager.saveUserInfo( rawUser );

  // Capture the user actually handed to the delegate and check it was trimmed.
  ArgumentCaptor<IUser> userCaptor = ArgumentCaptor.forClass( IUser.class );
  verify( roleDelegate ).createUser( userCaptor.capture() );
  assertEquals( "Spaces should be trimmed", "login", userCaptor.getValue().getLogin() );
}

@Test( expected = KettleException.class )
public void saveUserInfo_NormalizesInfo_FailsIfStillBreaches() throws Exception {
  // A whitespace-only login is still invalid after normalization and must be rejected.
  manager.saveUserInfo( new UserInfo( " " ) );
}
// Condensed summary of JobDelegate (class under test). dataNodeToElement(DataNode)
// is a convenience overload: it builds a fresh JobMeta, fills it from the node via
// the two-argument overload, and returns it. The constant list names the DataNode
// property/node keys used by the serialization tests below.
JobDelegate extends AbstractDelegate implements ISharedObjectsTransformer, java.io.Serializable { public RepositoryElementInterface dataNodeToElement( final DataNode rootNode ) throws KettleException { JobMeta jobMeta = new JobMeta(); dataNodeToElement( rootNode, jobMeta ); return jobMeta; } JobDelegate( final Repository repo, final IUnifiedRepository pur ); @SuppressWarnings( "unchecked" ) SharedObjects loadSharedObjects( final RepositoryElementInterface element, final Map<RepositoryObjectType, List<? extends SharedObjectInterface>> sharedObjectsByType ); void saveSharedObjects( final RepositoryElementInterface element, final String versionComment ); RepositoryElementInterface dataNodeToElement( final DataNode rootNode ); void dataNodeToElement( final DataNode rootNode, final RepositoryElementInterface element ); DataNode elementToDataNode( final RepositoryElementInterface element ); static final String PROP_USE_LOGFIELD; static final String PROP_PASS_BATCH_ID; static final String PROP_USE_BATCH_ID; static final String PROP_JOB_STATUS; static final String NODE_PARAMETERS; static final String PROP_NR_PARAMETERS; static final String PROP_NR_HOPS; static final String NODE_HOPS; static final String NODE_CUSTOM; static final String PROP_JOBENTRY_TYPE; static final String PROP_PARALLEL; static final String PROP_GUI_DRAW; static final String PROP_GUI_LOCATION_Y; static final String PROP_GUI_LOCATION_X; static final String PROP_NR; static final String PROP_NR_JOB_ENTRY_COPIES; static final String PROP_NR_NOTES; static final String NODE_NOTES; static final String NODE_ENTRIES; }
// Verifies that dataNodeToElement() propagates the attribute-group map from the
// persisted copy node to BOTH the JobEntryCopy and its underlying JobEntryBase.
// The first half builds a minimal DataNode tree (entries/copy/custom/notes/hops/
// params/attribute-groups) that dataNodeToElement() expects; `addSubnode` and
// `setNodes`/`setProperty` are test helpers defined elsewhere in this file.
@Test public void testDataNodeToElementCopiesAttributesToJobEntryCopyAndJobEntry() throws KettleException { IUnifiedRepository mockUnifiedRepository = mock( IUnifiedRepository.class ); JobDelegate jobDelegate = new JobDelegate( mockPurRepository, mockUnifiedRepository ); DataNode mockDataNode = mock( DataNode.class ); DataNode entriesNode = addSubnode( mockDataNode, JobDelegate.NODE_ENTRIES ); DataNode copyNode = mock( DataNode.class ); setNodes( entriesNode, JobDelegate.PROP_NR_JOB_ENTRY_COPIES, Arrays.asList( copyNode ) ); DataNode nodeCustom = addSubnode( copyNode, JobDelegate.NODE_CUSTOM ); DataNode notesNode = addSubnode( mockDataNode, JobDelegate.NODE_NOTES ); DataNode hopsNode = addSubnode( mockDataNode, JobDelegate.NODE_HOPS ); DataNode paramsNode = addSubnode( mockDataNode, JobDelegate.NODE_PARAMETERS ); DataNode groupsNode = addSubnode( copyNode, AttributesMapUtil.NODE_ATTRIBUTE_GROUPS ); DataNode groupNode = mock( DataNode.class ); setNodes( groupsNode, null, Arrays.asList( groupNode ) ); JobMeta mockJobMeta = mock( JobMeta.class ); JobLogTable mockJobLogTable = mock( JobLogTable.class ); List<JobEntryCopy> jobCopies = new ArrayList<JobEntryCopy>(); DataProperty mockDataProperty = mock( DataProperty.class ); List<DataProperty> dataProperties = Arrays.asList( mockDataProperty ); setProperty( mockDataNode, JobDelegate.PROP_JOB_STATUS, 0L ); setProperty( mockDataNode, JobDelegate.PROP_USE_BATCH_ID, false ); setProperty( mockDataNode, JobDelegate.PROP_PASS_BATCH_ID, false ); setProperty( mockDataNode, JobDelegate.PROP_USE_LOGFIELD, false ); setProperty( copyNode, JobDelegate.PROP_JOBENTRY_TYPE, "WRITE_TO_LOG" ); when( copyNode.getId() ).thenReturn( "COPYNODE_ID" ); setProperty( copyNode, JobDelegate.PROP_NR, 0L ); setProperty( copyNode, JobDelegate.PROP_GUI_LOCATION_X, 0L ); setProperty( copyNode, JobDelegate.PROP_GUI_LOCATION_Y, 0L ); setProperty( copyNode, JobDelegate.PROP_GUI_DRAW, false ); setProperty( copyNode, JobDelegate.PROP_PARALLEL, false ); 
// Empty notes/hops and zero parameters keep the delegate's loops trivial; the
// single MOCK_PROPERTY/MOCK_VALUE pair is the attribute the assertion tracks.
setProperty( nodeCustom, "logmessage_#_0", (String) null ); setNodes( notesNode, JobDelegate.PROP_NR_NOTES, Arrays.<DataNode> asList() ); setNodes( hopsNode, JobDelegate.PROP_NR_HOPS, Arrays.<DataNode> asList() ); setProperty( paramsNode, JobDelegate.PROP_NR_PARAMETERS, 0L ); when( mockJobMeta.getJobCopies() ).thenReturn( jobCopies ); when( mockJobMeta.getJobLogTable() ).thenReturn( mockJobLogTable ); when( groupNode.getName() ).thenReturn( "GROUP_NODE_NAME" ); when( groupNode.getProperties() ).thenReturn( dataProperties ); when( mockDataProperty.getName() ).thenReturn( "MOCK_PROPERTY" ); when( mockDataProperty.getString() ).thenReturn( "MOCK_VALUE" ); jobDelegate.dataNodeToElement( mockDataNode, mockJobMeta ); assertEquals( jobCopies.get( 0 ).getAttributesMap(), ( (JobEntryBase) jobCopies.get( 0 ).getEntry() ) .getAttributesMap() ); }
// Condensed summary of JobDelegate.elementToDataNode() (class under test):
// serializes a JobMeta into a DataNode tree — private databases, notes, job entry
// copies (including attribute maps via AttributesMapUtil and a per-entry CUSTOM
// node written through RepositoryProxy), hops, and parameters — then saves job
// details on the root node and returns it.
// NOTE(review): the detailed-log message contains the typo "Job enty"; it is a
// runtime string and is deliberately left unchanged here.
JobDelegate extends AbstractDelegate implements ISharedObjectsTransformer, java.io.Serializable { public DataNode elementToDataNode( final RepositoryElementInterface element ) throws KettleException { JobMeta jobMeta = (JobMeta) element; DataNode rootNode = new DataNode( NODE_JOB ); if ( jobMeta.getPrivateDatabases() != null ) { String privateDatabaseNames = StringUtils.join( jobMeta.getPrivateDatabases(), JOB_PRIVATE_DATABASE_DELIMITER ); DataNode privateDatabaseNode = rootNode.addNode( NODE_JOB_PRIVATE_DATABASES ); privateDatabaseNode.setProperty( PROP_JOB_PRIVATE_DATABASE_NAMES, privateDatabaseNames ); } DataNode notesNode = rootNode.addNode( NODE_NOTES ); notesNode.setProperty( PROP_NR_NOTES, jobMeta.nrNotes() ); for ( int i = 0; i < jobMeta.nrNotes(); i++ ) { NotePadMeta note = jobMeta.getNote( i ); DataNode noteNode = notesNode.addNode( NOTE_PREFIX + i ); noteNode.setProperty( PROP_XML, note.getXML() ); } if ( log.isDetailed() ) { log.logDetailed( toString(), "Saving " + jobMeta.nrJobEntries() + " Job enty copies to repository..." 
); } DataNode entriesNode = rootNode.addNode( NODE_ENTRIES ); entriesNode.setProperty( PROP_NR_JOB_ENTRY_COPIES, jobMeta.nrJobEntries() ); for ( int i = 0; i < jobMeta.nrJobEntries(); i++ ) { JobEntryCopy copy = jobMeta.getJobEntry( i ); JobEntryInterface entry = copy.getEntry(); DataNode copyNode = entriesNode.addNode( sanitizeNodeName( copy.getName() ) + "_" + ( i + 1 ) + EXT_JOB_ENTRY_COPY ); copyNode.setProperty( PROP_NAME, copy.getName() ); copyNode.setProperty( PROP_DESCRIPTION, copy.getDescription() ); copyNode.setProperty( PROP_NR, copy.getNr() ); copyNode.setProperty( PROP_GUI_LOCATION_X, copy.getLocation().x ); copyNode.setProperty( PROP_GUI_LOCATION_Y, copy.getLocation().y ); copyNode.setProperty( PROP_GUI_DRAW, copy.isDrawn() ); copyNode.setProperty( PROP_PARALLEL, copy.isLaunchingInParallel() ); if ( entry instanceof JobEntryBase ) { AttributesMapUtil.saveAttributesMap( copyNode, (JobEntryBase) entry ); } AttributesMapUtil.saveAttributesMap( copyNode, copy ); copyNode.setProperty( PROP_JOBENTRY_TYPE, entry.getPluginId() ); DataNode customNode = new DataNode( NODE_CUSTOM ); RepositoryProxy proxy = new RepositoryProxy( customNode ); entry.saveRep( proxy, proxy.getMetaStore(), null ); compatibleEntrySaveRep( entry, proxy, null ); copyNode.addNode( customNode ); } DataNode hopsNode = rootNode.addNode( NODE_HOPS ); hopsNode.setProperty( PROP_NR_HOPS, jobMeta.nrJobHops() ); for ( int i = 0; i < jobMeta.nrJobHops(); i++ ) { JobHopMeta hop = jobMeta.getJobHop( i ); DataNode hopNode = hopsNode.addNode( JOB_HOP_PREFIX + i ); hopNode.setProperty( JOB_HOP_FROM, hop.getFromEntry().getName() ); hopNode.setProperty( JOB_HOP_FROM_NR, hop.getFromEntry().getNr() ); hopNode.setProperty( JOB_HOP_TO, hop.getToEntry().getName() ); hopNode.setProperty( JOB_HOP_TO_NR, hop.getToEntry().getNr() ); hopNode.setProperty( JOB_HOP_ENABLED, hop.isEnabled() ); hopNode.setProperty( JOB_HOP_EVALUATION, hop.getEvaluation() ); hopNode.setProperty( JOB_HOP_UNCONDITIONAL, 
hop.isUnconditional() ); } String[] paramKeys = jobMeta.listParameters(); DataNode paramsNode = rootNode.addNode( NODE_PARAMETERS ); paramsNode.setProperty( PROP_NR_PARAMETERS, paramKeys == null ? 0 : paramKeys.length ); for ( int idx = 0; idx < paramKeys.length; idx++ ) { DataNode paramNode = paramsNode.addNode( PARAM_PREFIX + idx ); String key = paramKeys[idx]; String description = jobMeta.getParameterDescription( paramKeys[idx] ); String defaultValue = jobMeta.getParameterDefault( paramKeys[idx] ); paramNode.setProperty( PARAM_KEY, key != null ? key : "" ); paramNode.setProperty( PARAM_DEFAULT, defaultValue != null ? defaultValue : "" ); paramNode.setProperty( PARAM_DESC, description != null ? description : "" ); } saveJobDetails( rootNode, jobMeta ); return rootNode; } JobDelegate( final Repository repo, final IUnifiedRepository pur ); @SuppressWarnings( "unchecked" ) SharedObjects loadSharedObjects( final RepositoryElementInterface element, final Map<RepositoryObjectType, List<? extends SharedObjectInterface>> sharedObjectsByType ); void saveSharedObjects( final RepositoryElementInterface element, final String versionComment ); RepositoryElementInterface dataNodeToElement( final DataNode rootNode ); void dataNodeToElement( final DataNode rootNode, final RepositoryElementInterface element ); DataNode elementToDataNode( final RepositoryElementInterface element ); static final String PROP_USE_LOGFIELD; static final String PROP_PASS_BATCH_ID; static final String PROP_USE_BATCH_ID; static final String PROP_JOB_STATUS; static final String NODE_PARAMETERS; static final String PROP_NR_PARAMETERS; static final String PROP_NR_HOPS; static final String NODE_HOPS; static final String NODE_CUSTOM; static final String PROP_JOBENTRY_TYPE; static final String PROP_PARALLEL; static final String PROP_GUI_DRAW; static final String PROP_GUI_LOCATION_Y; static final String PROP_GUI_LOCATION_X; static final String PROP_NR; static final String PROP_NR_JOB_ENTRY_COPIES; static final 
String PROP_NR_NOTES; static final String NODE_NOTES; static final String NODE_ENTRIES; }
// Two closely related serialization tests: the first pins that attribute groups
// on the JobEntryCopy survive elementToDataNode(); the second pins the same for
// attribute groups on the JobEntry itself. Both build a one-entry JobMeta mock,
// serialize it, and walk entries -> first copy node -> attribute-groups node.
// NOTE(review): the two tests differ only in which mock returns the attribute
// map — a shared private helper would remove the duplication (left as-is here).
@Test public void testElementToDataNodeSavesCopyAttributes() throws KettleException { JobMeta mockJobMeta = mock( JobMeta.class ); IUnifiedRepository mockUnifiedRepository = mock( IUnifiedRepository.class ); JobDelegate jobDelegate = new JobDelegate( mockPurRepository, mockUnifiedRepository ); JobLogTable mockJobLogTable = mock( JobLogTable.class ); JobEntryCopy mockJobEntryCopy = mock( JobEntryCopy.class ); Map<String, Map<String, String>> attributes = new HashMap<String, Map<String, String>>(); Map<String, String> group = new HashMap<String, String>(); final String mockGroup = "MOCK_GROUP"; final String mockProperty = "MOCK_PROPERTY"; final String mockValue = "MOCK_VALUE"; group.put( mockProperty, mockValue ); attributes.put( mockGroup, group ); when( mockJobEntryCopy.getAttributesMap() ).thenReturn( attributes ); JobEntryBaseAndInterface mockJobEntry = mock( JobEntryBaseAndInterface.class ); when( mockJobMeta.listParameters() ).thenReturn( new String[] {} ); when( mockJobMeta.getJobLogTable() ).thenReturn( mockJobLogTable ); when( mockJobMeta.nrJobEntries() ).thenReturn( 1 ); when( mockJobMeta.getJobEntry( 0 ) ).thenReturn( mockJobEntryCopy ); when( mockJobEntryCopy.getName() ).thenReturn( "MOCK_NAME" ); when( mockJobEntryCopy.getLocation() ).thenReturn( new Point( 0, 0 ) ); when( mockJobEntryCopy.getEntry() ).thenReturn( mockJobEntry ); DataNode dataNode = jobDelegate.elementToDataNode( mockJobMeta ); DataNode groups = dataNode.getNode( "entries" ).getNodes().iterator().next().getNode( AttributesMapUtil.NODE_ATTRIBUTE_GROUPS ); DataNode mockGroupNode = groups.getNode( mockGroup ); assertEquals( mockValue, mockGroupNode.getProperty( mockProperty ).getString() ); } @Test public void testElementToDataNodeSavesAttributes() throws KettleException { JobMeta mockJobMeta = mock( JobMeta.class ); IUnifiedRepository mockUnifiedRepository = mock( IUnifiedRepository.class ); JobDelegate jobDelegate = new JobDelegate( mockPurRepository, mockUnifiedRepository ); JobLogTable 
mockJobLogTable = mock( JobLogTable.class ); JobEntryCopy mockJobEntryCopy = mock( JobEntryCopy.class ); Map<String, Map<String, String>> attributes = new HashMap<String, Map<String, String>>(); Map<String, String> group = new HashMap<String, String>(); final String mockGroup = "MOCK_GROUP"; final String mockProperty = "MOCK_PROPERTY"; final String mockValue = "MOCK_VALUE"; group.put( mockProperty, mockValue ); attributes.put( mockGroup, group ); JobEntryBaseAndInterface mockJobEntry = mock( JobEntryBaseAndInterface.class ); when( mockJobEntry.getAttributesMap() ).thenReturn( attributes ); when( mockJobMeta.listParameters() ).thenReturn( new String[] {} ); when( mockJobMeta.getJobLogTable() ).thenReturn( mockJobLogTable ); when( mockJobMeta.nrJobEntries() ).thenReturn( 1 ); when( mockJobMeta.getJobEntry( 0 ) ).thenReturn( mockJobEntryCopy ); when( mockJobEntryCopy.getName() ).thenReturn( "MOCK_NAME" ); when( mockJobEntryCopy.getLocation() ).thenReturn( new Point( 0, 0 ) ); when( mockJobEntryCopy.getEntry() ).thenReturn( mockJobEntry ); DataNode dataNode = jobDelegate.elementToDataNode( mockJobMeta ); DataNode groups = dataNode.getNode( "entries" ).getNodes().iterator().next().getNode( AttributesMapUtil.NODE_ATTRIBUTE_GROUPS ); DataNode mockGroupNode = groups.getNode( mockGroup ); assertEquals( mockValue, mockGroupNode.getProperty( mockProperty ).getString() ); }
// Condensed summary of UIEEJob (class under test): getImage() returns the lock
// icon when the job is locked, otherwise the standard repository-job icon.
UIEEJob extends UIJob implements ILockObject, IRevisionObject, IAclObject, java.io.Serializable { @Override public String getImage() { if ( isLocked() ) { return "ui/images/lock.svg"; } return "ui/images/jobrepo.svg"; } UIEEJob( RepositoryElementMetaInterface rc, UIRepositoryDirectory parent, Repository rep ); @Override String getImage(); String getLockMessage(); void lock( String lockNote ); void unlock(); boolean isLocked(); RepositoryLock getRepositoryLock(); UIRepositoryObjectRevisions getRevisions(); void restoreRevision( UIRepositoryObjectRevision revision, String commitMessage ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); @Override Boolean getVersioningEnabled(); @Override Boolean getVersionCommentEnabled(); }
@Test
public void testGetImage() {
  // Unlocked job: expect the plain repository-job icon path.
  String image = uiJob.getImage();
  assertNotNull( image );

  // Once the repository object reports a lock, getImage() must switch icons.
  when( mockEERepositoryObject.getLock() ).thenReturn( mockRepositoryLock );
  String lockedImage = uiJob.getImage();
  assertNotNull( lockedImage );
  assertNotEquals( image, lockedImage );
  // Fix: dropped the `File f = new File( image )` locals from the original —
  // they were computed, never asserted on, and added no coverage (dead code).
}
// Condensed summary of RepositoryExporter (class under test):
// exportAllObjectsWithFeedback() forwards to the internal exporter with
// feedback collection enabled and returns one ExportFeedback per exported item.
RepositoryExporter implements IRepositoryExporterFeedback { @Override public List<ExportFeedback> exportAllObjectsWithFeedback( ProgressMonitorListener monitorOuter, String xmlFilename, RepositoryDirectoryInterface root, String exportType ) throws KettleException { return exportAllObjectsInternal( monitorOuter, xmlFilename, root, exportType, true ); } RepositoryExporter( Repository repository ); @Override boolean isRulesViolation(); @Override void setImportRulesToValidate( ImportRules importRules ); @Override List<ExportFeedback> exportAllObjectsWithFeedback( ProgressMonitorListener monitorOuter, String xmlFilename, RepositoryDirectoryInterface root, String exportType ); @Override void exportAllObjects( ProgressMonitorListener monitorOuter, String xmlFilename, RepositoryDirectoryInterface root, String exportType ); }
@Test public void testExportJobsWithFeedback() throws Exception { RepositoryExporter exporter = new RepositoryExporter( repository ); List<ExportFeedback> feedback = exporter.exportAllObjectsWithFeedback( null, xmlFileName, root, RepositoryExporter.ExportType.JOBS.toString() ); Assert.assertEquals( "Feedback contains all items recorded", 2, feedback.size() ); ExportFeedback fb = feedback.get( 1 ); Assert.assertEquals( "Job1 was exproted", "job1", fb.getItemName() ); Assert.assertEquals( "Repository path for Job1 is specified", "path", fb.getItemPath() ); String res = this.validateXmlFile( fileObject.getContent().getInputStream(), " Assert.assertEquals( "Export xml contains exported job xml", "found", res ); } @Test public void testExportTransformationsWithFeedback() throws Exception { RepositoryExporter exporter = new RepositoryExporter( repository ); List<ExportFeedback> feedback = exporter.exportAllObjectsWithFeedback( null, xmlFileName, root, RepositoryExporter.ExportType.TRANS.toString() ); Assert.assertEquals( "Feedback contains all items recorded", 2, feedback.size() ); ExportFeedback fb = feedback.get( 1 ); Assert.assertEquals( "Job1 was exproted", "trans1", fb.getItemName() ); Assert.assertEquals( "Repository path for Job1 is specified", "path", fb.getItemPath() ); String res = this.validateXmlFile( fileObject.getContent().getInputStream(), " Assert.assertEquals( "Export xml contains exported job xml", "found", res ); }
// Condensed summary of UIEEJob: getLockMessage() is a straight pass-through to
// the wrapped repository object's lock message.
UIEEJob extends UIJob implements ILockObject, IRevisionObject, IAclObject, java.io.Serializable { public String getLockMessage() throws KettleException { return repObj.getLockMessage(); } UIEEJob( RepositoryElementMetaInterface rc, UIRepositoryDirectory parent, Repository rep ); @Override String getImage(); String getLockMessage(); void lock( String lockNote ); void unlock(); boolean isLocked(); RepositoryLock getRepositoryLock(); UIRepositoryObjectRevisions getRevisions(); void restoreRevision( UIRepositoryObjectRevision revision, String commitMessage ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); @Override Boolean getVersioningEnabled(); @Override Boolean getVersionCommentEnabled(); }
@Test
public void testGetLockMessage() throws Exception {
  // The UI wrapper must surface the lock message straight from the repository object.
  when( mockEERepositoryObject.getLockMessage() ).thenReturn( LOCK_MESSAGE );
  String message = uiJob.getLockMessage();
  assertEquals( LOCK_MESSAGE, message );
}
// Condensed summary of UIEEJob: lock() acquires a RepositoryLock via the lock
// service, stores it on the repository object, and notifies the parent directory
// so the UI refreshes.
UIEEJob extends UIJob implements ILockObject, IRevisionObject, IAclObject, java.io.Serializable { public void lock( String lockNote ) throws KettleException { RepositoryLock lock = lockService.lockJob( getObjectId(), lockNote ); repObj.setLock( lock ); uiParent.fireCollectionChanged(); } UIEEJob( RepositoryElementMetaInterface rc, UIRepositoryDirectory parent, Repository rep ); @Override String getImage(); String getLockMessage(); void lock( String lockNote ); void unlock(); boolean isLocked(); RepositoryLock getRepositoryLock(); UIRepositoryObjectRevisions getRevisions(); void restoreRevision( UIRepositoryObjectRevision revision, String commitMessage ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); @Override Boolean getVersioningEnabled(); @Override Boolean getVersionCommentEnabled(); }
@Test
public void testLock() throws Exception {
  when( mockLockService.lockJob( mockObjectId, LOCK_NOTE ) ).thenReturn( mockRepositoryLock );

  // Locking stores the acquired lock on the repository object and refreshes the parent.
  uiJob.lock( LOCK_NOTE );
  verify( mockEERepositoryObject ).setLock( mockRepositoryLock );
  verify( mockParent ).fireCollectionChanged();

  // Unlocking clears the stored lock and triggers a second refresh.
  uiJob.unlock();
  verify( mockEERepositoryObject ).setLock( null );
  verify( mockParent, times( 2 ) ).fireCollectionChanged();
}
// Condensed summary of UIEEJob: hasAccess() lazily builds a per-permission cache
// so the ACL service is consulted at most once per permission.
// NOTE(review): summary uses the deprecated `new Boolean(...)` constructor —
// production code would prefer Boolean.valueOf(); left untouched in this view.
UIEEJob extends UIJob implements ILockObject, IRevisionObject, IAclObject, java.io.Serializable { @Override public boolean hasAccess( RepositoryFilePermission perm ) throws KettleException { if ( hasAccess == null ) { hasAccess = new HashMap<RepositoryFilePermission, Boolean>(); } if ( hasAccess.get( perm ) == null ) { hasAccess.put( perm, new Boolean( aclService.hasAccess( repObj.getObjectId(), perm ) ) ); } return hasAccess.get( perm ).booleanValue(); } UIEEJob( RepositoryElementMetaInterface rc, UIRepositoryDirectory parent, Repository rep ); @Override String getImage(); String getLockMessage(); void lock( String lockNote ); void unlock(); boolean isLocked(); RepositoryLock getRepositoryLock(); UIRepositoryObjectRevisions getRevisions(); void restoreRevision( UIRepositoryObjectRevision revision, String commitMessage ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); @Override Boolean getVersioningEnabled(); @Override Boolean getVersionCommentEnabled(); }
@Test
public void testAccess() throws Exception {
  // Grant READ, deny WRITE at the ACL-service level; the UI object must reflect both.
  when( mockAclService.hasAccess( mockObjectId, RepositoryFilePermission.READ ) ).thenReturn( true );
  when( mockAclService.hasAccess( mockObjectId, RepositoryFilePermission.WRITE ) ).thenReturn( false );

  assertTrue( uiJob.hasAccess( RepositoryFilePermission.READ ) );
  assertFalse( uiJob.hasAccess( RepositoryFilePermission.WRITE ) );
}
// Condensed summary of UIRepositoryObjectAcl: getPermissionSet() exposes the
// wrapped ObjectAce's permission EnumSet directly.
UIRepositoryObjectAcl extends XulEventSourceAdapter implements java.io.Serializable { public EnumSet<RepositoryFilePermission> getPermissionSet() { return ace.getPermissions(); } UIRepositoryObjectAcl( ObjectAce ace ); @Override boolean equals( Object obj ); ObjectAce getAce(); String getRecipientName(); void setRecipientName( String recipientName ); ObjectRecipient.Type getRecipientType(); void setRecipientType( ObjectRecipient.Type recipientType ); EnumSet<RepositoryFilePermission> getPermissionSet(); void setPermissionSet( RepositoryFilePermission first, RepositoryFilePermission... rest ); void setPermissionSet( EnumSet<RepositoryFilePermission> permissionSet ); void addPermission( RepositoryFilePermission permissionToAdd ); void removePermission( RepositoryFilePermission permissionToRemove ); @Override String toString(); }
@Test
public void testGetPermissionSet() {
  // createObjectAce() (fixture helper) builds an ACE holding exactly the ALL permission.
  UIRepositoryObjectAcl acl = new UIRepositoryObjectAcl( createObjectAce() );

  EnumSet<RepositoryFilePermission> permissionSet = acl.getPermissionSet();
  assertNotNull( permissionSet );
  assertEquals( 1, permissionSet.size() );
  assertTrue( permissionSet.contains( RepositoryFilePermission.ALL ) );
}
// Condensed summary of UIRepositoryObjectAcl: equals() delegates to the wrapped
// ObjectAce after a null check.
// NOTE(review): the summary shows an unguarded cast (no instanceof check), so a
// non-UIRepositoryObjectAcl argument would throw ClassCastException — confirm
// against the production class before relying on equals() with mixed types.
UIRepositoryObjectAcl extends XulEventSourceAdapter implements java.io.Serializable { @Override public boolean equals( Object obj ) { if ( obj == null ) { return false; } UIRepositoryObjectAcl acl = (UIRepositoryObjectAcl) obj; return ace.equals( acl.getAce() ); } UIRepositoryObjectAcl( ObjectAce ace ); @Override boolean equals( Object obj ); ObjectAce getAce(); String getRecipientName(); void setRecipientName( String recipientName ); ObjectRecipient.Type getRecipientType(); void setRecipientType( ObjectRecipient.Type recipientType ); EnumSet<RepositoryFilePermission> getPermissionSet(); void setPermissionSet( RepositoryFilePermission first, RepositoryFilePermission... rest ); void setPermissionSet( EnumSet<RepositoryFilePermission> permissionSet ); void addPermission( RepositoryFilePermission permissionToAdd ); void removePermission( RepositoryFilePermission permissionToRemove ); @Override String toString(); }
@Test
public void testEquals() {
  UIRepositoryObjectAcl baseAcl = new UIRepositoryObjectAcl( createObjectAce() );

  // Same recipient, recipient type and permissions -> equal.
  UIRepositoryObjectAcl sameAcl = new UIRepositoryObjectAcl( new RepositoryObjectAce(
      new RepositoryObjectRecipient( RECIPIENT1, ObjectRecipient.Type.USER ),
      EnumSet.of( RepositoryFilePermission.ALL ) ) );
  assertTrue( baseAcl.equals( sameAcl ) );

  // Different recipient type -> not equal.
  UIRepositoryObjectAcl differentType = new UIRepositoryObjectAcl( new RepositoryObjectAce(
      new RepositoryObjectRecipient( RECIPIENT1, ObjectRecipient.Type.SYSTEM_ROLE ),
      EnumSet.of( RepositoryFilePermission.ALL ) ) );
  assertFalse( baseAcl.equals( differentType ) );

  // Different permission set -> not equal.
  UIRepositoryObjectAcl differentPermissions = new UIRepositoryObjectAcl( new RepositoryObjectAce(
      new RepositoryObjectRecipient( RECIPIENT1, ObjectRecipient.Type.USER ),
      EnumSet.of( RepositoryFilePermission.READ, RepositoryFilePermission.ALL ) ) );
  assertFalse( baseAcl.equals( differentPermissions ) );

  // Different recipient name -> not equal.
  UIRepositoryObjectAcl differentRecipient = new UIRepositoryObjectAcl( new RepositoryObjectAce(
      new RepositoryObjectRecipient( RECIPIENT2, ObjectRecipient.Type.USER ),
      EnumSet.of( RepositoryFilePermission.ALL ) ) );
  assertFalse( baseAcl.equals( differentRecipient ) );

  // Null is never equal.
  assertFalse( baseAcl.equals( null ) );
}
// Condensed summary of UIRepositoryObjectAcl: toString() renders the wrapped
// ACE's recipient only.
UIRepositoryObjectAcl extends XulEventSourceAdapter implements java.io.Serializable { @Override public String toString() { return ace.getRecipient().toString(); } UIRepositoryObjectAcl( ObjectAce ace ); @Override boolean equals( Object obj ); ObjectAce getAce(); String getRecipientName(); void setRecipientName( String recipientName ); ObjectRecipient.Type getRecipientType(); void setRecipientType( ObjectRecipient.Type recipientType ); EnumSet<RepositoryFilePermission> getPermissionSet(); void setPermissionSet( RepositoryFilePermission first, RepositoryFilePermission... rest ); void setPermissionSet( EnumSet<RepositoryFilePermission> permissionSet ); void addPermission( RepositoryFilePermission permissionToAdd ); void removePermission( RepositoryFilePermission permissionToRemove ); @Override String toString(); }
@Test
public void testToString() {
  UIRepositoryObjectAcl acl = new UIRepositoryObjectAcl( createObjectAce() );

  // toString() renders the recipient, so the fixture recipient name must appear.
  String rendered = acl.toString();
  assertNotNull( rendered );
  assertTrue( rendered.contains( RECIPIENT1 ) );
}
// Condensed summary of UIEETransformation (class under test): getImage() returns
// the lock icon when locked, otherwise the standard transformation icon; a
// KettleException from isLocked() is rethrown wrapped in RuntimeException.
UIEETransformation extends UITransformation implements ILockObject, IRevisionObject, IAclObject, java.io.Serializable { @Override public String getImage() { try { if ( isLocked() ) { return "ui/images/lock.svg"; } } catch ( KettleException e ) { throw new RuntimeException( e ); } return "ui/images/transrepo.svg"; } UIEETransformation( RepositoryElementMetaInterface rc, UIRepositoryDirectory parent, Repository rep ); @Override String getImage(); String getLockMessage(); void lock( String lockNote ); void unlock(); boolean isLocked(); RepositoryLock getRepositoryLock(); UIRepositoryObjectRevisions getRevisions(); void restoreRevision( UIRepositoryObjectRevision revision, String commitMessage ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); @Override Boolean getVersioningEnabled(); @Override Boolean getVersionCommentEnabled(); }
@Test
public void testGetImage() {
  // Unlocked transformation: expect the plain transformation icon path.
  String image = uiTransformation.getImage();
  assertNotNull( image );

  // Once the repository object reports locked, getImage() must switch icons.
  when( mockEERepositoryObject.isLocked() ).thenReturn( true );
  String lockedImage = uiTransformation.getImage();
  assertNotNull( lockedImage );
  assertNotEquals( image, lockedImage );
  // Fix: dropped the `File f = new File( image )` locals from the original —
  // they were computed, never asserted on, and added no coverage (dead code).
}
// Condensed summary of UIEETransformation: getLockMessage() is a straight
// pass-through to the wrapped repository object's lock message.
UIEETransformation extends UITransformation implements ILockObject, IRevisionObject, IAclObject, java.io.Serializable { public String getLockMessage() throws KettleException { return repObj.getLockMessage(); } UIEETransformation( RepositoryElementMetaInterface rc, UIRepositoryDirectory parent, Repository rep ); @Override String getImage(); String getLockMessage(); void lock( String lockNote ); void unlock(); boolean isLocked(); RepositoryLock getRepositoryLock(); UIRepositoryObjectRevisions getRevisions(); void restoreRevision( UIRepositoryObjectRevision revision, String commitMessage ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); @Override Boolean getVersioningEnabled(); @Override Boolean getVersionCommentEnabled(); }
@Test
public void testGetLockMessage() throws Exception {
  // The UI wrapper must surface the lock message straight from the repository object.
  when( mockEERepositoryObject.getLockMessage() ).thenReturn( LOCK_MESSAGE );
  String message = uiTransformation.getLockMessage();
  assertEquals( LOCK_MESSAGE, message );
}
// Condensed summary of UIEETransformation: lock() acquires a RepositoryLock via
// the lock service, stores it on the repository object, and notifies the parent
// directory so the UI refreshes.
UIEETransformation extends UITransformation implements ILockObject, IRevisionObject, IAclObject, java.io.Serializable { public void lock( String lockNote ) throws KettleException { RepositoryLock lock = lockService.lockTransformation( getObjectId(), lockNote ); repObj.setLock( lock ); uiParent.fireCollectionChanged(); } UIEETransformation( RepositoryElementMetaInterface rc, UIRepositoryDirectory parent, Repository rep ); @Override String getImage(); String getLockMessage(); void lock( String lockNote ); void unlock(); boolean isLocked(); RepositoryLock getRepositoryLock(); UIRepositoryObjectRevisions getRevisions(); void restoreRevision( UIRepositoryObjectRevision revision, String commitMessage ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); @Override Boolean getVersioningEnabled(); @Override Boolean getVersionCommentEnabled(); }
@Test
public void testLock() throws Exception {
  when( mockLockService.lockTransformation( mockObjectId, LOCK_NOTE ) ).thenReturn( mockRepositoryLock );

  // Locking stores the acquired lock on the repository object and refreshes the parent.
  uiTransformation.lock( LOCK_NOTE );
  verify( mockEERepositoryObject ).setLock( mockRepositoryLock );
  verify( mockParent ).fireCollectionChanged();

  // Unlocking clears the stored lock and triggers a second refresh.
  uiTransformation.unlock();
  verify( mockEERepositoryObject ).setLock( null );
  verify( mockParent, times( 2 ) ).fireCollectionChanged();
}
// Condensed summary of UIEETransformation: hasAccess() lazily builds a
// per-permission cache so the ACL service is consulted at most once per
// permission. NOTE(review): summary uses the deprecated `new Boolean(...)`
// constructor — left untouched in this view.
UIEETransformation extends UITransformation implements ILockObject, IRevisionObject, IAclObject, java.io.Serializable { @Override public boolean hasAccess( RepositoryFilePermission perm ) throws KettleException { if ( hasAccess == null ) { hasAccess = new HashMap<RepositoryFilePermission, Boolean>(); } if ( hasAccess.get( perm ) == null ) { hasAccess.put( perm, new Boolean( aclService.hasAccess( getObjectId(), perm ) ) ); } return hasAccess.get( perm ).booleanValue(); } UIEETransformation( RepositoryElementMetaInterface rc, UIRepositoryDirectory parent, Repository rep ); @Override String getImage(); String getLockMessage(); void lock( String lockNote ); void unlock(); boolean isLocked(); RepositoryLock getRepositoryLock(); UIRepositoryObjectRevisions getRevisions(); void restoreRevision( UIRepositoryObjectRevision revision, String commitMessage ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); @Override Boolean getVersioningEnabled(); @Override Boolean getVersionCommentEnabled(); }
@Test
public void testAccess() throws Exception {
  // Grant READ, deny WRITE at the ACL-service level; the UI object must reflect both.
  when( mockAclService.hasAccess( mockObjectId, RepositoryFilePermission.READ ) ).thenReturn( true );
  when( mockAclService.hasAccess( mockObjectId, RepositoryFilePermission.WRITE ) ).thenReturn( false );

  assertTrue( uiTransformation.hasAccess( RepositoryFilePermission.READ ) );
  assertFalse( uiTransformation.hasAccess( RepositoryFilePermission.WRITE ) );
}
// Condensed summary of UIRepositoryObjectAcls: setObjectAcl() stores the ACL
// model and fires "acls" and "entriesInheriting" property-change events so
// bound XUL widgets refresh.
UIRepositoryObjectAcls extends XulEventSourceAdapter implements java.io.Serializable { public void setObjectAcl( ObjectAcl obj ) { this.obj = obj; this.firePropertyChange( "acls", null, getAcls() ); this.firePropertyChange( "entriesInheriting", null, isEntriesInheriting() ); } UIRepositoryObjectAcls(); void setObjectAcl( ObjectAcl obj ); ObjectAcl getObjectAcl(); List<UIRepositoryObjectAcl> getAcls(); void setAcls( List<UIRepositoryObjectAcl> acls ); void addAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addDefaultAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addAcl( UIRepositoryObjectAcl aclToAdd ); void addDefaultAcl( UIRepositoryObjectAcl aclToAdd ); void removeAcls( List<UIRepositoryObjectAcl> aclsToRemove ); void removeAcl( String recipientName ); void removeSelectedAcls(); void updateAcl( UIRepositoryObjectAcl aclToUpdate ); UIRepositoryObjectAcl getAcl( String recipient ); List<UIRepositoryObjectAcl> getSelectedAclList(); void setSelectedAclList( List<UIRepositoryObjectAcl> list ); boolean isEntriesInheriting(); void setEntriesInheriting( boolean entriesInheriting ); ObjectRecipient getOwner(); void setRemoveEnabled( boolean removeEnabled ); boolean isRemoveEnabled(); int getAceIndex( ObjectAce ace ); ObjectAce getAceAtIndex( int index ); void setModelDirty( boolean modelDirty ); boolean isModelDirty(); boolean hasManageAclAccess(); void setHasManageAclAccess( boolean hasManageAclAccess ); void clear(); }
// NOTE(review): relies on the test fixture (setUp) having already called
// repositoryObjectAcls.setObjectAcl( repObjectAcl ) -- TODO confirm. Only the
// getter round-trip is exercised here, not the property-change notifications.
@Test public void testSetObjectAcl() { ObjectAcl objectAcl = repositoryObjectAcls.getObjectAcl(); assertEquals( repObjectAcl, objectAcl ); }
UIRepositoryObjectAcls extends XulEventSourceAdapter implements java.io.Serializable {
  /**
   * Pushes the permission set of aclToUpdate into every ACE whose recipient name
   * matches, mirrors it into the cached UI acl, fires the "acls" change event,
   * re-selects the updated entry as the sole selection, and marks the model dirty.
   *
   * NOTE(review): if no acl with that recipient name exists, getAcl(...) presumably
   * returns null and the setPermissionSet call below would throw NPE -- TODO confirm
   * callers only pass known recipients.
   */
  public void updateAcl( UIRepositoryObjectAcl aclToUpdate ) { List<ObjectAce> aces = obj.getAces(); for ( ObjectAce ace : aces ) { if ( ace.getRecipient().getName().equals( aclToUpdate.getRecipientName() ) ) { ace.setPermissions( aclToUpdate.getPermissionSet() ); } } UIRepositoryObjectAcl acl = getAcl( aclToUpdate.getRecipientName() ); acl.setPermissionSet( aclToUpdate.getPermissionSet() ); this.firePropertyChange( "acls", null, getAcls() ); selectedAclList.clear(); List<UIRepositoryObjectAcl> aclList = new ArrayList<UIRepositoryObjectAcl>(); aclList.add( aclToUpdate ); setSelectedAclList( aclList ); setModelDirty( true ); }
  UIRepositoryObjectAcls(); void setObjectAcl( ObjectAcl obj ); ObjectAcl getObjectAcl(); List<UIRepositoryObjectAcl> getAcls(); void setAcls( List<UIRepositoryObjectAcl> acls ); void addAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addDefaultAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addAcl( UIRepositoryObjectAcl aclToAdd ); void addDefaultAcl( UIRepositoryObjectAcl aclToAdd ); void removeAcls( List<UIRepositoryObjectAcl> aclsToRemove ); void removeAcl( String recipientName ); void removeSelectedAcls(); void updateAcl( UIRepositoryObjectAcl aclToUpdate ); UIRepositoryObjectAcl getAcl( String recipient ); List<UIRepositoryObjectAcl> getSelectedAclList(); void setSelectedAclList( List<UIRepositoryObjectAcl> list ); boolean isEntriesInheriting(); void setEntriesInheriting( boolean entriesInheriting ); ObjectRecipient getOwner(); void setRemoveEnabled( boolean removeEnabled ); boolean isRemoveEnabled(); int getAceIndex( ObjectAce ace ); ObjectAce getAceAtIndex( int index ); void setModelDirty( boolean modelDirty ); boolean isModelDirty(); boolean hasManageAclAccess(); void setHasManageAclAccess( boolean hasManageAclAccess ); void clear(); }
/**
 * Verifies updateAcl propagates a newly added permission to the model's acl list.
 * Fix: the original assertion lived only inside the loop, so the test silently
 * passed if the updated recipient never appeared; a found-flag now guards that.
 */
@Test
public void testUpdateAcl() {
  List<UIRepositoryObjectAcl> originalUIAcls = Arrays.asList( new UIRepositoryObjectAcl[] { objectAcl1, objectAcl2 } );
  repositoryObjectAcls.addAcls( originalUIAcls );
  objectAcl2.addPermission( RepositoryFilePermission.DELETE );
  repositoryObjectAcls.updateAcl( objectAcl2 );
  boolean updatedAclFound = false;
  for ( UIRepositoryObjectAcl uiAcl : repositoryObjectAcls.getAcls() ) {
    if ( objectAcl2.getRecipientName().equals( uiAcl.getRecipientName() ) ) {
      updatedAclFound = true;
      assertEquals( "Delete permission was not added", objectAcl2.getPermissionSet(), uiAcl.getPermissionSet() );
    }
  }
  assertTrue( "Updated acl not present in the model", updatedAclFound );
}
UIRepositoryObjectAcls extends XulEventSourceAdapter implements java.io.Serializable {
  /**
   * Resets the model to a pristine state: lowers the remove-enabled, dirty and
   * manage-access flags and nulls out the acl and selection lists. Each call goes
   * through the setters, so their individual property-change events still fire.
   */
  public void clear() { setRemoveEnabled( false ); setModelDirty( false ); setAcls( null ); setSelectedAclList( null ); setHasManageAclAccess( false ); }
  UIRepositoryObjectAcls(); void setObjectAcl( ObjectAcl obj ); ObjectAcl getObjectAcl(); List<UIRepositoryObjectAcl> getAcls(); void setAcls( List<UIRepositoryObjectAcl> acls ); void addAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addDefaultAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addAcl( UIRepositoryObjectAcl aclToAdd ); void addDefaultAcl( UIRepositoryObjectAcl aclToAdd ); void removeAcls( List<UIRepositoryObjectAcl> aclsToRemove ); void removeAcl( String recipientName ); void removeSelectedAcls(); void updateAcl( UIRepositoryObjectAcl aclToUpdate ); UIRepositoryObjectAcl getAcl( String recipient ); List<UIRepositoryObjectAcl> getSelectedAclList(); void setSelectedAclList( List<UIRepositoryObjectAcl> list ); boolean isEntriesInheriting(); void setEntriesInheriting( boolean entriesInheriting ); ObjectRecipient getOwner(); void setRemoveEnabled( boolean removeEnabled ); boolean isRemoveEnabled(); int getAceIndex( ObjectAce ace ); ObjectAce getAceAtIndex( int index ); void setModelDirty( boolean modelDirty ); boolean isModelDirty(); boolean hasManageAclAccess(); void setHasManageAclAccess( boolean hasManageAclAccess ); void clear(); }
@Test
public void testClear() {
  // Populate the model and raise removeEnabled so clear() has real state to reset.
  repositoryObjectAcls.addAcls( Arrays.asList( objectAcl1, objectAcl2, objectAcl3 ) );
  repositoryObjectAcls.setRemoveEnabled( true );
  assertTrue( repositoryObjectAcls.isRemoveEnabled() );

  repositoryObjectAcls.clear();

  // Everything reverts to defaults: empty selection, flags down, inheriting on.
  assertTrue( repositoryObjectAcls.getSelectedAclList().isEmpty() );
  assertFalse( repositoryObjectAcls.isRemoveEnabled() );
  assertFalse( repositoryObjectAcls.isModelDirty() );
  assertTrue( repositoryObjectAcls.isEntriesInheriting() );
}
UIRepositoryObjectAcls extends XulEventSourceAdapter implements java.io.Serializable {
  /**
   * Returns the owner recipient of the backing ACL object, or null when no ACL
   * object has been set on this model yet.
   */
  public ObjectRecipient getOwner() {
    return obj == null ? null : obj.getOwner();
  }
  UIRepositoryObjectAcls(); void setObjectAcl( ObjectAcl obj ); ObjectAcl getObjectAcl(); List<UIRepositoryObjectAcl> getAcls(); void setAcls( List<UIRepositoryObjectAcl> acls ); void addAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addDefaultAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addAcl( UIRepositoryObjectAcl aclToAdd ); void addDefaultAcl( UIRepositoryObjectAcl aclToAdd ); void removeAcls( List<UIRepositoryObjectAcl> aclsToRemove ); void removeAcl( String recipientName ); void removeSelectedAcls(); void updateAcl( UIRepositoryObjectAcl aclToUpdate ); UIRepositoryObjectAcl getAcl( String recipient ); List<UIRepositoryObjectAcl> getSelectedAclList(); void setSelectedAclList( List<UIRepositoryObjectAcl> list ); boolean isEntriesInheriting(); void setEntriesInheriting( boolean entriesInheriting ); ObjectRecipient getOwner(); void setRemoveEnabled( boolean removeEnabled ); boolean isRemoveEnabled(); int getAceIndex( ObjectAce ace ); ObjectAce getAceAtIndex( int index ); void setModelDirty( boolean modelDirty ); boolean isModelDirty(); boolean hasManageAclAccess(); void setHasManageAclAccess( boolean hasManageAclAccess ); void clear(); }
@Test
public void testGetOwner() {
  // Fixture model is backed by repObjectAcl; its owner name is RECIPIENT0.
  assertEquals( RECIPIENT0, repositoryObjectAcls.getOwner().getName() );
  // A freshly constructed model has no backing acl object, hence no owner.
  repositoryObjectAcls = new UIRepositoryObjectAcls();
  assertNull( repositoryObjectAcls.getOwner() );
}
UIRepositoryObjectAcls extends XulEventSourceAdapter implements java.io.Serializable {
  /**
   * Linear scan for the position of the given ACE inside the backing ACL object's
   * ace list, matched via ace.equals.
   *
   * @return zero-based index of the first match, or -1 when the ACE is absent
   */
  public int getAceIndex( ObjectAce ace ) {
    int position = 0;
    for ( ObjectAce candidate : obj.getAces() ) {
      if ( ace.equals( candidate ) ) {
        return position;
      }
      position++;
    }
    return -1;
  }
  UIRepositoryObjectAcls(); void setObjectAcl( ObjectAcl obj ); ObjectAcl getObjectAcl(); List<UIRepositoryObjectAcl> getAcls(); void setAcls( List<UIRepositoryObjectAcl> acls ); void addAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addDefaultAcls( List<UIRepositoryObjectAcl> aclsToAdd ); void addAcl( UIRepositoryObjectAcl aclToAdd ); void addDefaultAcl( UIRepositoryObjectAcl aclToAdd ); void removeAcls( List<UIRepositoryObjectAcl> aclsToRemove ); void removeAcl( String recipientName ); void removeSelectedAcls(); void updateAcl( UIRepositoryObjectAcl aclToUpdate ); UIRepositoryObjectAcl getAcl( String recipient ); List<UIRepositoryObjectAcl> getSelectedAclList(); void setSelectedAclList( List<UIRepositoryObjectAcl> list ); boolean isEntriesInheriting(); void setEntriesInheriting( boolean entriesInheriting ); ObjectRecipient getOwner(); void setRemoveEnabled( boolean removeEnabled ); boolean isRemoveEnabled(); int getAceIndex( ObjectAce ace ); ObjectAce getAceAtIndex( int index ); void setModelDirty( boolean modelDirty ); boolean isModelDirty(); boolean hasManageAclAccess(); void setHasManageAclAccess( boolean hasManageAclAccess ); void clear(); }
@Test
public void testGetAceIndex() {
  repositoryObjectAcls.addAcls( Arrays.asList( objectAcl1, objectAcl2, objectAcl3 ) );
  // The index reported for an ACE must line up with the UI acl list position.
  int index = repositoryObjectAcls.getAceIndex( objectAcl2.getAce() );
  assertTrue( objectAcl2.equals( repositoryObjectAcls.getAcls().get( index ) ) );
}
UIEERepositoryDirectory extends UIRepositoryDirectory implements IAclObject, java.io.Serializable {
  /**
   * Deletes this directory from the repository, detaches it from the parent's
   * children and repository-object collections, then refreshes the parent node.
   *
   * @param deleteHomeDirectories forwarded to RepositoryExtended so home
   *        directories can be force-deleted; ignored for plain repositories
   * @throws Exception propagated from the repository delete call
   */
  public void delete( boolean deleteHomeDirectories ) throws Exception { if ( rep instanceof RepositoryExtended ) { ( (RepositoryExtended) rep ).deleteRepositoryDirectory( getDirectory(), deleteHomeDirectories ); } else { rep.deleteRepositoryDirectory( getDirectory() ); } if ( getParent().getChildren().contains( this ) ) { getParent().getChildren().remove( this ); } if ( getParent().getRepositoryObjects().contains( this ) ) { getParent().getRepositoryObjects().remove( this ); } getParent().refresh(); }
  UIEERepositoryDirectory(); UIEERepositoryDirectory( RepositoryDirectoryInterface rd, UIRepositoryDirectory uiParent, Repository rep ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); void delete( boolean deleteHomeDirectories ); void setName( String name, boolean renameHomeDirectories ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); }
// Exercises both delete paths: a plain Repository (2-arg deleteRepositoryDirectory)
// and a RepositoryExtended/Pur repository (3-arg form with deleteHomeDirectories).
// Both paths must also detach the node from the parent's collections and refresh.
@Test public void testDelete() throws Exception { UIRepositoryDirectories mockUIRepositoryDirectories = mock( UIRepositoryDirectories.class ); when( mockUIRepositoryDirectories.contains( uiRepDir ) ).thenReturn( true ); when( mockParent.getChildren() ).thenReturn( mockUIRepositoryDirectories ); UIRepositoryObjects mockUIRepositoryObjects = mock( UIRepositoryObjects.class ); when( mockUIRepositoryObjects.contains( uiRepDir ) ).thenReturn( true ); when( mockParent.getRepositoryObjects() ).thenReturn( mockUIRepositoryObjects ); uiRepDir.delete( false ); verify( mockRepository ).deleteRepositoryDirectory( mockRepositoryDirectory ); verify( mockUIRepositoryDirectories, times( 1 ) ).remove( uiRepDir ); verify( mockUIRepositoryObjects, times( 1 ) ).remove( uiRepDir ); verify( mockParent, times( 1 ) ).refresh(); uiPurRepDir.delete( false ); verify( mockPurRepository ).deleteRepositoryDirectory( mockRepositoryDirectory, false ); verify( mockUIRepositoryDirectories, times( 2 ) ).remove( uiPurRepDir ); verify( mockUIRepositoryObjects, times( 2 ) ).remove( uiPurRepDir ); verify( mockParent, times( 2 ) ).refresh(); }
UIEERepositoryDirectory extends UIRepositoryDirectory implements IAclObject, java.io.Serializable {
  /**
   * Renames this directory in the repository, re-reads its object information and
   * refreshes the node.
   *
   * NOTE(review): the equalsIgnoreCase guard makes a case-only rename
   * ("foo" -> "FOO") a silent no-op -- presumably intentional for
   * case-insensitive backends, TODO confirm.
   *
   * @param renameHomeDirectories forwarded to RepositoryExtended; ignored for
   *        plain repositories
   */
  public void setName( String name, boolean renameHomeDirectories ) throws Exception { if ( getDirectory().getName().equalsIgnoreCase( name ) ) { return; } if ( rep instanceof RepositoryExtended ) { ( (RepositoryExtended) rep ).renameRepositoryDirectory( getDirectory().getObjectId(), null, name, renameHomeDirectories ); } else { rep.renameRepositoryDirectory( getDirectory().getObjectId(), null, name ); } obj = rep.getObjectInformation( getObjectId(), getRepositoryElementType() ); refresh(); }
  UIEERepositoryDirectory(); UIEERepositoryDirectory( RepositoryDirectoryInterface rd, UIRepositoryDirectory uiParent, Repository rep ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); void delete( boolean deleteHomeDirectories ); void setName( String name, boolean renameHomeDirectories ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); }
@Test
public void testSetName() throws Exception {
  final String renamed = "foo";
  // Current name differs from the new one, so the early-exit guard is not taken.
  when( mockRepositoryDirectory.getName() ).thenReturn( "dirName" );

  uiRepDir.setName( renamed, true );
  verify( mockRepository ).renameRepositoryDirectory( mockRepositoryDirectory.getObjectId(), null, renamed );

  uiPurRepDir.setName( renamed, true );
  verify( mockPurRepository ).renameRepositoryDirectory( mockRepositoryDirectory.getObjectId(), null, renamed, true );
}
UIEERepositoryDirectory extends UIRepositoryDirectory implements IAclObject, java.io.Serializable {
  /**
   * Checks whether the current user has the given repository-file permission on
   * this directory. Results are cached per permission so the ACL service is
   * queried at most once for each permission over the lifetime of this object.
   *
   * @param perm the repository file permission to test
   * @return true if the ACL service grants the permission for this object's id
   * @throws KettleException propagated from the underlying ACL service lookup
   */
  @Override
  public boolean hasAccess( RepositoryFilePermission perm ) throws KettleException {
    if ( hasAccess == null ) {
      // Lazily created so objects whose permissions are never queried pay nothing.
      hasAccess = new HashMap<RepositoryFilePermission, Boolean>();
    }
    if ( hasAccess.get( perm ) == null ) {
      // Boolean.valueOf reuses the canonical TRUE/FALSE instances instead of the
      // deprecated Boolean(boolean) constructor, which always allocates.
      hasAccess.put( perm, Boolean.valueOf( aclService.hasAccess( getObjectId(), perm ) ) );
    }
    return hasAccess.get( perm ).booleanValue();
  }
  UIEERepositoryDirectory(); UIEERepositoryDirectory( RepositoryDirectoryInterface rd, UIRepositoryDirectory uiParent, Repository rep ); void getAcls( UIRepositoryObjectAcls acls, boolean forceParentInheriting ); void getAcls( UIRepositoryObjectAcls acls ); void setAcls( UIRepositoryObjectAcls security ); void delete( boolean deleteHomeDirectories ); void setName( String name, boolean renameHomeDirectories ); @Override void clearAcl(); @Override boolean hasAccess( RepositoryFilePermission perm ); }
@Test
public void testAccess() throws Exception {
  // Grant READ but deny WRITE at the ACL-service level.
  when( mockAclService.hasAccess( mockObjectId, RepositoryFilePermission.WRITE ) ).thenReturn( false );
  when( mockAclService.hasAccess( mockObjectId, RepositoryFilePermission.READ ) ).thenReturn( true );
  // The directory must report exactly what the service granted.
  assertTrue( uiPurRepDir.hasAccess( RepositoryFilePermission.READ ) );
  assertFalse( uiPurRepDir.hasAccess( RepositoryFilePermission.WRITE ) );
}
UIRepositoryObjectAclModel extends XulEventSourceAdapter implements java.io.Serializable {
  /**
   * Moves each role name in rolesToAssign from the available side to the assigned
   * side via assignRole (non-String entries are silently skipped), fires the
   * "selectedRoleList" change event, selects the freshly assigned acls, and clears
   * the available-side selection.
   */
  public void assignRoles( List<Object> rolesToAssign ) { List<UIRepositoryObjectAcl> acls = new ArrayList<UIRepositoryObjectAcl>(); for ( Object role : rolesToAssign ) { if ( role instanceof String ) { String roleToAssign = (String) role; acls.add( assignRole( roleToAssign ) ); } } this.firePropertyChange( "selectedRoleList", null, getSelectedRoleList() ); setSelectedAssignedRoles( acls ); setSelectedAvailableRoles( new ArrayList<String>() ); }
  UIRepositoryObjectAclModel( UIRepositoryObjectAcls acls ); List<UIRepositoryObjectAcl> getAclsToAdd(); void setAclsToRemove( List<UIRepositoryObjectAcl> aclsToRemove ); List<String> getSelectedAvailableRoles(); void setSelectedAvailableRoles( List<String> selectedAvailableRoles ); void setSelectedAvailableRole( String selectedAvailableRole ); List<String> getSelectedAvailableUsers(); void setSelectedAvailableUsers( List<String> selectedAvailableUsers ); void setSelectedAvailableUser( String selectedAvailableUser ); List<UIRepositoryObjectAcl> getSelectedAssignedRoles(); void setSelectedAssignedRoles( List<UIRepositoryObjectAcl> selectedAssignedRoles ); List<UIRepositoryObjectAcl> getSelectedAssignedUsers(); void setSelectedAssignedUsers( List<UIRepositoryObjectAcl> selectedAssignedUsers ); void setSelectedAssignedUser( UIRepositoryObjectAcl selectedAssignedUser ); List<String> getAvailableUserList(); boolean isUserAssignmentPossible(); void setUserAssignmentPossible( boolean userAssignmentPossible ); boolean isUserUnassignmentPossible(); void setUserUnassignmentPossible( boolean userUnassignmentPossible ); boolean isRoleAssignmentPossible(); void setRoleAssignmentPossible( boolean roleAssignmentPossible ); boolean isRoleUnassignmentPossible(); void setRoleUnassignmentPossible( boolean roleUnassignmentPossible ); void setAclsList( List<String> userList, List<String> roleList ); void setAvailableUserList( List<String> userList ); List<String> getAvailableRoleList(); void setAvailableRoleList( List<String> roleList ); void assignRoles( List<Object> rolesToAssign ); UIRepositoryObjectAcl assignRole( String roleToAssign ); UIRepositoryObjectAcl getAcl( String aclName ); void unassign( List<Object> toUnassign ); void assignUsers( List<Object> usersToAssign ); UIRepositoryObjectAcl assignUser( String userToAssign ); UIRepositoryObjectAcls getSelectedAcls(); void setSelectedAcls( UIRepositoryObjectAcls selectedAcls ); List<UIRepositoryObjectAcl> getSelectedUserList(); List<UIRepositoryObjectAcl> getSelectedRoleList(); String getAvailableUser( int index ); int getAvailableUserIndex( String user ); String getAvailableRole( int index ); int getAvailableRoleIndex( String role ); boolean findByRecipientName( String recipientName ); void clear(); void updateSelectedAcls(); UIRepositoryObjectAcl getSelectedUser( int index ); int getSelectedUserIndex( UIRepositoryObjectAcl user ); UIRepositoryObjectAcl getSelectedRole( int index ); int getSelectedRoleIndex( UIRepositoryObjectAcl role ); }
// Full assign/unassign round trip for roles: assign ROLE2, verify it moves from the
// available list to the assigned/pending-add lists, commit via updateSelectedAcls,
// then unassign both ROLE2 and the pre-existing ROLE1 and verify the model drains.
@Test public void testAssignRoles() { UIRepositoryObjectAcl selectedRoleAcl = new UIRepositoryObjectAcl( createRoleAce( ROLE1 ) ); repositoryObjectAcls.addAcl( selectedRoleAcl ); repositoryObjectAclModel.setAclsList( null, defaultRoleNameList ); List<Object> objectRoleList = Arrays.asList( new Object[] { ROLE2 } ); repositoryObjectAclModel.assignRoles( objectRoleList ); assertStringListMatches( Arrays.asList( new String[] { ROLE3 } ), repositoryObjectAclModel.getAvailableRoleList() ); assertNameToAclListMatches( Arrays.asList( new String[] { ROLE2 } ), repositoryObjectAclModel .getSelectedAssignedRoles() ); assertNameToAclListMatches( Arrays.asList( new String[] { ROLE2 } ), repositoryObjectAclModel.getAclsToAdd() ); repositoryObjectAclModel.updateSelectedAcls(); assertNameToAclListMatches( Arrays.asList( new String[] { ROLE1, ROLE2 } ), repositoryObjectAclModel .getSelectedAcls().getAcls() ); repositoryObjectAclModel.getAclsToAdd().clear(); UIRepositoryObjectAcl role2Acl = repositoryObjectAclModel.getSelectedRole( 1 ); repositoryObjectAclModel.unassign( Arrays.asList( new Object[] { role2Acl, selectedRoleAcl } ) ); assertEquals( 0, repositoryObjectAclModel.getSelectedAssignedRoles().size() ); assertStringListMatches( defaultRoleNameList, repositoryObjectAclModel.getAvailableRoleList() ); repositoryObjectAclModel.updateSelectedAcls(); assertEquals( 0, repositoryObjectAclModel.getSelectedAcls().getAcls().size() ); }
UIRepositoryObjectAclModel extends XulEventSourceAdapter implements java.io.Serializable {
  /**
   * Moves each user name in usersToAssign from the available side to the assigned
   * side via assignUser (non-String entries are silently skipped), fires the
   * "selectedUserList" change event, selects the freshly assigned acls, and clears
   * the available-side selection.
   *
   * NOTE(review): previousVal is captured but never used afterwards (the change
   * event fires with a null old value) -- kept for interface stability, but it
   * looks vestigial; TODO confirm before removing.
   */
  public void assignUsers( List<Object> usersToAssign ) {
    List<UIRepositoryObjectAcl> previousVal = new ArrayList<UIRepositoryObjectAcl>();
    previousVal.addAll( getSelectedUserList() );
    List<UIRepositoryObjectAcl> assignList = new ArrayList<UIRepositoryObjectAcl>();
    for ( Object user : usersToAssign ) {
      // Braces added: the unbraced single-statement if diverged from the style of
      // the sibling assignRoles and invites maintenance slips.
      if ( user instanceof String ) {
        assignList.add( assignUser( (String) user ) );
      }
    }
    this.firePropertyChange( "selectedUserList", null, getSelectedUserList() );
    setSelectedAssignedUsers( assignList );
    setSelectedAvailableUsers( new ArrayList<String>() );
  }
  UIRepositoryObjectAclModel( UIRepositoryObjectAcls acls ); List<UIRepositoryObjectAcl> getAclsToAdd(); void setAclsToRemove( List<UIRepositoryObjectAcl> aclsToRemove ); List<String> getSelectedAvailableRoles(); void setSelectedAvailableRoles( List<String> selectedAvailableRoles ); void setSelectedAvailableRole( String selectedAvailableRole ); List<String> getSelectedAvailableUsers(); void setSelectedAvailableUsers( List<String> selectedAvailableUsers ); void setSelectedAvailableUser( String selectedAvailableUser ); List<UIRepositoryObjectAcl> getSelectedAssignedRoles(); void setSelectedAssignedRoles( List<UIRepositoryObjectAcl> selectedAssignedRoles ); List<UIRepositoryObjectAcl> getSelectedAssignedUsers(); void setSelectedAssignedUsers( List<UIRepositoryObjectAcl> selectedAssignedUsers ); void setSelectedAssignedUser( UIRepositoryObjectAcl selectedAssignedUser ); List<String> getAvailableUserList(); boolean isUserAssignmentPossible(); void setUserAssignmentPossible( boolean userAssignmentPossible ); boolean isUserUnassignmentPossible(); void setUserUnassignmentPossible( boolean userUnassignmentPossible ); boolean isRoleAssignmentPossible(); void setRoleAssignmentPossible( boolean roleAssignmentPossible ); boolean isRoleUnassignmentPossible(); void setRoleUnassignmentPossible( boolean roleUnassignmentPossible ); void setAclsList( List<String> userList, List<String> roleList ); void setAvailableUserList( List<String> userList ); List<String> getAvailableRoleList(); void setAvailableRoleList( List<String> roleList ); void assignRoles( List<Object> rolesToAssign ); UIRepositoryObjectAcl assignRole( String roleToAssign ); UIRepositoryObjectAcl getAcl( String aclName ); void unassign( List<Object> toUnassign ); void assignUsers( List<Object> usersToAssign ); UIRepositoryObjectAcl assignUser( String userToAssign ); UIRepositoryObjectAcls getSelectedAcls(); void setSelectedAcls( UIRepositoryObjectAcls selectedAcls ); List<UIRepositoryObjectAcl> getSelectedUserList(); List<UIRepositoryObjectAcl> getSelectedRoleList(); String getAvailableUser( int index ); int getAvailableUserIndex( String user ); String getAvailableRole( int index ); int getAvailableRoleIndex( String role ); boolean findByRecipientName( String recipientName ); void clear(); void updateSelectedAcls(); UIRepositoryObjectAcl getSelectedUser( int index ); int getSelectedUserIndex( UIRepositoryObjectAcl user ); UIRepositoryObjectAcl getSelectedRole( int index ); int getSelectedRoleIndex( UIRepositoryObjectAcl role ); }
// Full assign/unassign round trip for users, mirroring testAssignRoles: assign
// USER2, verify available/assigned/pending-add lists, commit, then unassign both
// USER2 and the pre-existing USER1 and verify the model drains back to defaults.
@Test public void testAssignUsers() { UIRepositoryObjectAcl selectedUserAcl = new UIRepositoryObjectAcl( createUserAce( USER1 ) ); repositoryObjectAcls.addAcl( selectedUserAcl ); repositoryObjectAclModel.setAclsList( defaultUserNameList, null ); List<Object> objectUserList = Arrays.asList( new Object[] { USER2 } ); repositoryObjectAclModel.assignUsers( objectUserList ); assertStringListMatches( Arrays.asList( new String[] { USER3 } ), repositoryObjectAclModel.getAvailableUserList() ); assertNameToAclListMatches( Arrays.asList( new String[] { USER2 } ), repositoryObjectAclModel .getSelectedAssignedUsers() ); assertNameToAclListMatches( Arrays.asList( new String[] { USER2 } ), repositoryObjectAclModel.getAclsToAdd() ); repositoryObjectAclModel.updateSelectedAcls(); assertNameToAclListMatches( Arrays.asList( new String[] { USER1, USER2 } ), repositoryObjectAclModel .getSelectedAcls().getAcls() ); repositoryObjectAclModel.getAclsToAdd().clear(); UIRepositoryObjectAcl user2Acl = repositoryObjectAclModel.getSelectedUser( 1 ); repositoryObjectAclModel.unassign( Arrays.asList( new Object[] { user2Acl, selectedUserAcl } ) ); assertEquals( 0, repositoryObjectAclModel.getSelectedAssignedUsers().size() ); assertStringListMatches( defaultUserNameList, repositoryObjectAclModel.getAvailableUserList() ); repositoryObjectAclModel.updateSelectedAcls(); assertEquals( 0, repositoryObjectAclModel.getSelectedAcls().getAcls().size() ); }
KettleFileRepositoryMeta extends BaseRepositoryMeta implements RepositoryMeta {
  /**
   * Copies file-repository settings out of the given properties map onto this meta.
   * Base fields (display name, description, isDefault) are handled by the superclass.
   *
   * Null-safe: a key that is absent from the map now leaves the corresponding field
   * unchanged instead of throwing NullPointerException when the null Boolean is
   * auto-unboxed into the primitive setter parameter.
   *
   * @param properties       map keyed by SHOW_HIDDEN_FOLDERS, LOCATION and DO_NOT_MODIFY
   * @param repositoriesMeta passed through to the superclass
   */
  @Override
  public void populate( Map<String, Object> properties, RepositoriesMeta repositoriesMeta ) {
    super.populate( properties, repositoriesMeta );
    Boolean showHiddenFolders = (Boolean) properties.get( SHOW_HIDDEN_FOLDERS );
    if ( showHiddenFolders != null ) {
      setHidingHiddenFiles( showHiddenFolders );
    }
    String location = (String) properties.get( LOCATION );
    if ( location != null ) {
      setBaseDirectory( location );
    }
    Boolean doNotModify = (Boolean) properties.get( DO_NOT_MODIFY );
    if ( doNotModify != null ) {
      setReadOnly( doNotModify );
    }
  }
  KettleFileRepositoryMeta(); KettleFileRepositoryMeta( String id, String name, String description, String baseDirectory ); RepositoryCapabilities getRepositoryCapabilities(); String getXML(); void loadXML( Node repnode, List<DatabaseMeta> databases ); String getBaseDirectory(); void setBaseDirectory( String baseDirectory ); boolean isReadOnly(); void setReadOnly( boolean readOnly ); RepositoryMeta clone(); @Override void populate( Map<String, Object> properties, RepositoriesMeta repositoriesMeta ); @SuppressWarnings( "unchecked" ) @Override JSONObject toJSONObject(); boolean isHidingHiddenFiles(); void setHidingHiddenFiles( boolean hidingHiddenFiles ); static final String SHOW_HIDDEN_FOLDERS; static final String LOCATION; static final String DO_NOT_MODIFY; static String REPOSITORY_TYPE_ID; }
@Test
public void testPopulate() throws Exception {
  Map<String, Object> properties = new HashMap<>();
  properties.put( "displayName", NAME );
  properties.put( "description", DESCRIPTION );
  properties.put( "isDefault", true );
  properties.put( "showHiddenFolders", true );
  properties.put( "location", THIS_IS_THE_PATH );
  properties.put( "doNotModify", true );

  kettleFileRepositoryMeta.populate( properties, repositoriesMeta );

  // Fields handled by the superclass...
  assertEquals( NAME, kettleFileRepositoryMeta.getName() );
  assertEquals( DESCRIPTION, kettleFileRepositoryMeta.getDescription() );
  assertEquals( true, kettleFileRepositoryMeta.isDefault() );
  // ...and the file-repository specific ones.
  assertEquals( true, kettleFileRepositoryMeta.isHidingHiddenFiles() );
  assertEquals( THIS_IS_THE_PATH, kettleFileRepositoryMeta.getBaseDirectory() );
  assertEquals( true, kettleFileRepositoryMeta.isReadOnly() );
}
UIRepositoryObjectAclModel extends XulEventSourceAdapter implements java.io.Serializable {
  /**
   * Empties every backing list (pending add/remove sets, master and filtered
   * available user/role lists, all available/assigned selections) and lowers the
   * four assignment/unassignment-possible flags via their setters. The selectedAcls
   * model itself is not touched here.
   */
  public void clear() { aclsToAdd.clear(); aclsToRemove.clear(); masterAvailableRoleList.clear(); masterAvailableUserList.clear(); availableRoleList.clear(); availableUserList.clear(); selectedAvailableRoles.clear(); selectedAvailableUsers.clear(); selectedAssignedRoles.clear(); selectedAssignedUsers.clear(); setRoleAssignmentPossible( false ); setRoleUnassignmentPossible( false ); setUserAssignmentPossible( false ); setUserUnassignmentPossible( false ); }
  UIRepositoryObjectAclModel( UIRepositoryObjectAcls acls ); List<UIRepositoryObjectAcl> getAclsToAdd(); void setAclsToRemove( List<UIRepositoryObjectAcl> aclsToRemove ); List<String> getSelectedAvailableRoles(); void setSelectedAvailableRoles( List<String> selectedAvailableRoles ); void setSelectedAvailableRole( String selectedAvailableRole ); List<String> getSelectedAvailableUsers(); void setSelectedAvailableUsers( List<String> selectedAvailableUsers ); void setSelectedAvailableUser( String selectedAvailableUser ); List<UIRepositoryObjectAcl> getSelectedAssignedRoles(); void setSelectedAssignedRoles( List<UIRepositoryObjectAcl> selectedAssignedRoles ); List<UIRepositoryObjectAcl> getSelectedAssignedUsers(); void setSelectedAssignedUsers( List<UIRepositoryObjectAcl> selectedAssignedUsers ); void setSelectedAssignedUser( UIRepositoryObjectAcl selectedAssignedUser ); List<String> getAvailableUserList(); boolean isUserAssignmentPossible(); void setUserAssignmentPossible( boolean userAssignmentPossible ); boolean isUserUnassignmentPossible(); void setUserUnassignmentPossible( boolean userUnassignmentPossible ); boolean isRoleAssignmentPossible(); void setRoleAssignmentPossible( boolean roleAssignmentPossible ); boolean isRoleUnassignmentPossible(); void setRoleUnassignmentPossible( boolean roleUnassignmentPossible ); void setAclsList( List<String> userList, List<String> roleList ); void setAvailableUserList( List<String> userList ); List<String> getAvailableRoleList(); void setAvailableRoleList( List<String> roleList ); void assignRoles( List<Object> rolesToAssign ); UIRepositoryObjectAcl assignRole( String roleToAssign ); UIRepositoryObjectAcl getAcl( String aclName ); void unassign( List<Object> toUnassign ); void assignUsers( List<Object> usersToAssign ); UIRepositoryObjectAcl assignUser( String userToAssign ); UIRepositoryObjectAcls getSelectedAcls(); void setSelectedAcls( UIRepositoryObjectAcls selectedAcls ); List<UIRepositoryObjectAcl> getSelectedUserList(); List<UIRepositoryObjectAcl> getSelectedRoleList(); String getAvailableUser( int index ); int getAvailableUserIndex( String user ); String getAvailableRole( int index ); int getAvailableRoleIndex( String role ); boolean findByRecipientName( String recipientName ); void clear(); void updateSelectedAcls(); UIRepositoryObjectAcl getSelectedUser( int index ); int getSelectedUserIndex( UIRepositoryObjectAcl user ); UIRepositoryObjectAcl getSelectedRole( int index ); int getSelectedRoleIndex( UIRepositoryObjectAcl role ); }
// clear() must drain the model's own lists, but the selectedAcls collaborator keeps
// the two acls added before the clear -- hence the trailing size-1 assertions.
@Test public void testClear() { repositoryObjectAcls.addAcl( new UIRepositoryObjectAcl( createUserAce( USER1 ) ) ); repositoryObjectAcls.addAcl( new UIRepositoryObjectAcl( createRoleAce( ROLE1 ) ) ); repositoryObjectAclModel.setAclsList( defaultUserNameList, defaultRoleNameList ); repositoryObjectAclModel.assignRoles( Arrays.asList( new Object[] { ROLE2 } ) ); repositoryObjectAclModel.assignUsers( Arrays.asList( new Object[] { USER2 } ) ); repositoryObjectAclModel.clear(); assertEquals( 0, repositoryObjectAclModel.getAvailableUserList().size() ); assertEquals( 0, repositoryObjectAclModel.getAvailableRoleList().size() ); assertEquals( 0, repositoryObjectAclModel.getSelectedAssignedUsers().size() ); assertEquals( 0, repositoryObjectAclModel.getSelectedAssignedRoles().size() ); assertEquals( 0, repositoryObjectAclModel.getSelectedAvailableUsers().size() ); assertEquals( 0, repositoryObjectAclModel.getSelectedAvailableRoles().size() ); assertEquals( 1, repositoryObjectAclModel.getSelectedUserList().size() ); assertEquals( 1, repositoryObjectAclModel.getSelectedRoleList().size() ); }
TrashBrowseController extends BrowseController implements java.io.Serializable {
  /**
   * Restores the currently selected trash items. After the service-level undelete,
   * the closest ancestor directory known to dirMap is refreshed for each item;
   * restored transformations (and all transformations under a restored directory)
   * are re-opened through the TransAfterOpen extension point and marked unchanged.
   * Any failure is reported via displayExceptionMessage unless the main controller
   * recognizes it as a lost-repository condition.
   *
   * Fixes: dropped the always-true null check on the freshly constructed snapshot
   * list, and gave the no-selection RuntimeException a message (same type, so
   * existing catch sites are unaffected).
   *
   * @throws RuntimeException when no trash items are selected
   */
  public void undelete() {
    // Snapshot the selection: setTrash below may mutate selectedTrashFileItems.
    List<UIDeletedObject> selectedTrashFileItemsSnapshot =
      new ArrayList<UIDeletedObject>( selectedTrashFileItems );
    if ( selectedTrashFileItemsSnapshot.isEmpty() ) {
      throw new RuntimeException( "No deleted items selected to restore" );
    }
    List<ObjectId> ids = new ArrayList<ObjectId>();
    for ( UIDeletedObject uiObj : selectedTrashFileItemsSnapshot ) {
      ids.add( uiObj.getId() );
    }
    try {
      trashService.undelete( ids );
      setTrash( trashService.getTrash() );
      for ( UIDeletedObject uiObj : selectedTrashFileItemsSnapshot ) {
        // Walk up until we hit an ancestor directory that dirMap knows about.
        RepositoryDirectoryInterface dir = repository.findDirectory( uiObj.getOriginalParentPath() );
        while ( dir != null && dirMap.get( dir.getObjectId() ) == null ) {
          dir = dir.getParent();
        }
        if ( dir != null ) {
          dirMap.get( dir.getObjectId() ).refresh();
        }
        if ( RepositoryObjectType.TRANSFORMATION.name().equals( uiObj.getType() ) ) {
          TransMeta transMeta = repository.loadTransformation( uiObj.getId(), null );
          ExtensionPointHandler
            .callExtensionPoint( LogChannel.GENERAL, KettleExtensionPoint.TransAfterOpen.id, transMeta );
          transMeta.clearChanged();
        } else if ( !RepositoryObjectType.JOB.name().equals( uiObj.getType() ) ) {
          // Not a transformation and not a job: a restored directory. Re-open every
          // transformation underneath it.
          RepositoryDirectoryInterface actualDir = repository.findDirectory(
            uiObj.getOriginalParentPath() + RepositoryDirectory.DIRECTORY_SEPARATOR + uiObj.getName() );
          if ( actualDir != null ) {
            List<RepositoryElementMetaInterface> transformations = new ArrayList<>();
            getAllTransformations( actualDir, transformations );
            for ( RepositoryElementMetaInterface repositoryElementMetaInterface : transformations ) {
              TransMeta transMeta =
                repository.loadTransformation( repositoryElementMetaInterface.getObjectId(), null );
              ExtensionPointHandler
                .callExtensionPoint( LogChannel.GENERAL, KettleExtensionPoint.TransAfterOpen.id, transMeta );
              transMeta.clearChanged();
            }
          } else {
            displayExceptionMessage( BaseMessages.getString( PKG, "TrashBrowseController.UnableToRestoreDirectory",
              uiObj.getOriginalParentPath() + RepositoryDirectory.DIRECTORY_SEPARATOR + uiObj.getName() ) );
          }
        }
      }
      deck.setSelectedIndex( 1 );
    } catch ( Throwable th ) {
      if ( mainController == null || !mainController.handleLostRepository( th ) ) {
        displayExceptionMessage( BaseMessages.getString( PKG, "TrashBrowseController.UnableToRestoreFile",
          th.getLocalizedMessage() ) );
      }
    }
  }
  TrashBrowseController(); @Override void init( Repository repository ); @Override void setSelectedFolderItems( List<UIRepositoryDirectory> selectedFolderItems ); void setTrash( List<IDeletedObject> trash ); List<IDeletedObject> getTrash(); void delete(); void undelete(); void setSelectedTrashFileItems( List<UIDeletedObject> selectedTrashFileItems ); }
// Empty selection must raise the bare RuntimeException declared by undelete().
@Test public void testUnDeleteNoFileSelected() throws Exception {
  when( selectedTrashFileItemsMock.toArray() ).thenReturn( new TrashBrowseController.UIDeletedObject[0] );
  when( selectedTrashFileItemsMock.size() ).thenReturn( 0 );
  expectedException.expect( RuntimeException.class );
  trBrController.undelete();
}

// Restoring a TRANSFORMATION loads it and fires clearChanged exactly once.
@Test public void testUnDeleteTransformation() throws Exception {
  testUnDelete( RepositoryObjectType.TRANSFORMATION.name(), true );
  verify( trashServiceMock, times( 1 ) ).undelete( anyList() );
  verify( transMetaMock, times( 1 ) ).clearChanged();
  verify( repositoryMock, times( 1 ) ).loadTransformation( objectIdMock, null );
  verify( deckMock, times( 1 ) ).setSelectedIndex( 1 );
}

// Restoring a JOB must NOT load or touch any transformation.
@Test public void testUnDeleteJob() throws Exception {
  testUnDelete( RepositoryObjectType.JOB.name(), true );
  verify( trashServiceMock, times( 1 ) ).undelete( anyList() );
  verify( transMetaMock, never() ).clearChanged();
  verify( repositoryMock, never() ).loadTransformation( objectIdMock, null );
  verify( deckMock, times( 1 ) ).setSelectedIndex( 1 );
}

// When the original parent is unknown, undelete() walks up and refreshes the
// closest ancestor directory still present in dirMap.
@Test public void testClosestUIRepositoryDirectory() throws Exception {
  testUnDelete( RepositoryObjectType.JOB.name(), false );
  verify( trashServiceMock, times( 1 ) ).undelete( anyList() );
  verify( transMetaMock, never() ).clearChanged();
  verify( repositoryMock, never() ).loadTransformation( objectIdMock, null );
  verify( repoDirMock, times( 1 ) ).refresh();
  verify( deckMock, times( 1 ) ).setSelectedIndex( 1 );
}

// If handleLostRepository declines (returns false), the error dialog is shown
// and the deck is never switched.
@Test public void testExceptionHandle() throws Exception {
  RuntimeException runtimeException = new RuntimeException( "Exception handle" );
  when( selectedTrashFileItemsMock.toArray() )
    .thenReturn( new TrashBrowseController.UIDeletedObject[] { uiDirectoryObjectMock } );
  when( selectedTrashFileItemsMock.size() ).thenReturn( 1 );
  doReturn( uiDirectoryObjectMock ).when( selectedTrashFileItemsMock ).get( 0 );
  doThrow( runtimeException ).when( trashServiceMock ).undelete( anyList() );
  doReturn( false ).when( mainControllerMock ).handleLostRepository( any( Throwable.class ) );
  trBrController.undelete();
  verify( messageBoxMock ).setTitle( "Error" );
  verify( messageBoxMock ).setAcceptLabel( "OK" );
  verify( messageBoxMock ).setMessage( contains( "Exception handle" ) );
  verify( messageBoxMock, times( 1 ) ).open();
  verify( deckMock, never() ).setSelectedIndex( 1 );
}

// If handleLostRepository accepts (returns true), no dialog appears at all.
@Test public void testExceptionNotHandle() throws Exception {
  RuntimeException runtimeException = new RuntimeException( "Exception handle" );
  when( selectedTrashFileItemsMock.toArray() )
    .thenReturn( new TrashBrowseController.UIDeletedObject[] { uiDirectoryObjectMock } );
  when( selectedTrashFileItemsMock.size() ).thenReturn( 1 );
  doReturn( uiDirectoryObjectMock ).when( selectedTrashFileItemsMock ).get( 0 );
  doThrow( runtimeException ).when( trashServiceMock ).undelete( anyList() );
  doReturn( true ).when( mainControllerMock ).handleLostRepository( any( Throwable.class ) );
  trBrController.undelete();
  verify( messageBoxMock, never() ).setTitle( "Error" );
  verify( messageBoxMock, never() ).setAcceptLabel( "OK" );
  verify( messageBoxMock, never() ).setMessage( contains( "Exception handle" ) );
  verify( messageBoxMock, never() ).open();
  verify( deckMock, never() ).setSelectedIndex( 1 );
}
RepositoryLockController extends AbstractXulEventHandler implements IUISupportController, java.io.Serializable { public void setMenuItemEnabledState( List<UIRepositoryObject> selectedRepoObjects ) { try { boolean result = false; if ( selectedRepoObjects.size() == 1 && selectedRepoObjects.get( 0 ) instanceof UIRepositoryDirectory ) { lockFileMenuItem.setDisabled( true ); deleteFileMenuItem.setDisabled( false ); renameFileMenuItem.setDisabled( false ); } else if ( selectedRepoObjects.size() == 1 && selectedRepoObjects.get( 0 ) instanceof ILockObject ) { final UIRepositoryContent contentToLock = (UIRepositoryContent) selectedRepoObjects.get( 0 ); if ( ( (ILockObject) contentToLock ).isLocked() ) { if ( repository instanceof PurRepository || repository.getRepositoryMeta() instanceof PurRepositoryMeta ) { result = service.canUnlockFileById( contentToLock.getObjectId() ); } else { result = ( (ILockObject) contentToLock ).getRepositoryLock().getLogin().equalsIgnoreCase( repository.getUserInfo().getLogin() ); } lockFileMenuItem.setDisabled( !result ); deleteFileMenuItem.setDisabled( !result ); renameFileMenuItem.setDisabled( !result ); } else { lockFileMenuItem.setDisabled( false ); deleteFileMenuItem.setDisabled( false ); renameFileMenuItem.setDisabled( false ); } } else { lockFileMenuItem.setDisabled( true ); deleteFileMenuItem.setDisabled( true ); renameFileMenuItem.setDisabled( true ); } } catch ( Exception e ) { throw new RuntimeException( e ); } } void init( Repository rep ); void onDragFromGlobalTree( DropEvent event ); void onDragFromLocalTable( DropEvent event ); String getName(); void lockContent(); void viewLockNote(); void setMenuItemEnabledState( List<UIRepositoryObject> selectedRepoObjects ); }
// Verifies that a locked file whose unlock is permitted (PUR meta + service says yes)
// results in the lock menu item being ENABLED (setDisabled(false)).
// Private collaborators are injected via reflection since there are no setters.
@Test public void testBlockLock() throws Exception {
  RepositoryLockController repositoryLockController = new RepositoryLockController();
  List<UIRepositoryObject> selectedRepoObjects = new ArrayList<>();
  UIEETransformation lockObject = Mockito.mock( UIEETransformation.class );
  selectedRepoObjects.add( lockObject );
  Mockito.when( lockObject.isLocked() ).thenReturn( true );
  ObjectId objectId = Mockito.mock( ObjectId.class );
  Mockito.when( lockObject.getObjectId() ).thenReturn( objectId );
  // Inject the three menu-item mocks into private fields.
  XulMenuitem lockFileMenuItem = Mockito.mock( XulMenuitem.class );
  Field lockFileMenuItemField = repositoryLockController.getClass().getDeclaredField( "lockFileMenuItem" );
  lockFileMenuItemField.setAccessible( true );
  lockFileMenuItemField.set( repositoryLockController, lockFileMenuItem );
  XulMenuitem deleteFileMenuItem = Mockito.mock( XulMenuitem.class );
  Field deleteFileMenuItemField = repositoryLockController.getClass().getDeclaredField( "deleteFileMenuItem" );
  deleteFileMenuItemField.setAccessible( true );
  deleteFileMenuItemField.set( repositoryLockController, deleteFileMenuItem );
  XulMenuitem renameFileMenuItem = Mockito.mock( XulMenuitem.class );
  Field renameFileMenuItemField = repositoryLockController.getClass().getDeclaredField( "renameFileMenuItem" );
  renameFileMenuItemField.setAccessible( true );
  renameFileMenuItemField.set( repositoryLockController, renameFileMenuItem );
  // PurRepositoryMeta forces the canUnlockFileById() path in the controller.
  Repository repository = Mockito.mock( Repository.class );
  PurRepositoryMeta repositoryMeta = Mockito.mock( PurRepositoryMeta.class );
  Mockito.when( repository.getRepositoryMeta() ).thenReturn( repositoryMeta );
  Field repositoryField = repositoryLockController.getClass().getDeclaredField( "repository" );
  repositoryField.setAccessible( true );
  repositoryField.set( repositoryLockController, repository );
  ILockService service = Mockito.mock( ILockService.class );
  Mockito.when( service.canUnlockFileById( objectId ) ).thenReturn( true );
  Field serviceField = repositoryLockController.getClass().getDeclaredField( "service" );
  serviceField.setAccessible( true );
  serviceField.set( repositoryLockController, service );
  repositoryLockController.setMenuItemEnabledState( selectedRepoObjects );
  // NOTE(review): isDisabled() on an unstubbed Mockito mock always returns false,
  // so this assertion is vacuous — the verify() below is the real check.
  Assert.assertFalse( lockFileMenuItem.isDisabled() );
  Mockito.verify( lockFileMenuItem ).setDisabled( false );
}
PermissionsCheckboxHandler { public void setAllChecked( boolean value ) { for ( PermissionsCheckboxes permissionsCheckboxes : ALL_PERMISSIONS ) { permissionsCheckboxes.permissionCheckbox.setChecked( value ); } } PermissionsCheckboxHandler( XulCheckbox readCheckbox, XulCheckbox writeCheckbox, XulCheckbox deleteCheckbox, XulCheckbox manageCheckbox ); EnumSet<RepositoryFilePermission> processCheckboxes(); EnumSet<RepositoryFilePermission> processCheckboxes( boolean enableAppropriate ); void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ); void updateCheckboxes( boolean enableAppropriate, EnumSet<RepositoryFilePermission> permissionEnumSet ); void setAllChecked( boolean value ); void setAllDisabled( boolean value ); }
@Test public void testSetAllUncheckedUnchecksAll() { boolean checked = false; permissionsCheckboxHandler.setAllChecked( checked ); verify( readCheckbox, times( 1 ) ).setChecked( checked ); verify( writeCheckbox, times( 1 ) ).setChecked( checked ); verify( deleteCheckbox, times( 1 ) ).setChecked( checked ); verify( manageCheckbox, times( 1 ) ).setChecked( checked ); } @Test public void testSetAllCheckedChecksAll() { boolean checked = true; permissionsCheckboxHandler.setAllChecked( checked ); verify( readCheckbox, times( 1 ) ).setChecked( checked ); verify( writeCheckbox, times( 1 ) ).setChecked( checked ); verify( deleteCheckbox, times( 1 ) ).setChecked( checked ); verify( manageCheckbox, times( 1 ) ).setChecked( checked ); }
RepositoriesMeta { public String toString() { return getClass().getSimpleName(); } RepositoriesMeta(); void clear(); void addDatabase( DatabaseMeta ci ); void addRepository( RepositoryMeta ri ); void addDatabase( int p, DatabaseMeta ci ); void addRepository( int p, RepositoryMeta ri ); DatabaseMeta getDatabase( int i ); RepositoryMeta getRepository( int i ); void removeDatabase( int i ); void removeRepository( int i ); int nrDatabases(); int nrRepositories(); DatabaseMeta searchDatabase( String name ); RepositoryMeta searchRepository( String name ); int indexOfDatabase( DatabaseMeta di ); int indexOfRepository( RepositoryMeta ri ); RepositoryMeta findRepository( String name ); RepositoryMeta findRepositoryById( String id ); boolean readData(); void readDataFromInputStream( InputStream is ); String getXML(); void writeData(); String toString(); RepositoriesMeta clone(); String getErrorMessage(); LogChannelInterface getLog(); }
@Test public void testToString() throws Exception { RepositoriesMeta repositoriesMeta = new RepositoriesMeta(); assertEquals( "RepositoriesMeta", repositoriesMeta.toString() ); }
PermissionsCheckboxHandler { public void setAllDisabled( boolean value ) { for ( PermissionsCheckboxes permissionsCheckboxes : ALL_PERMISSIONS ) { permissionsCheckboxes.permissionCheckbox.setDisabled( value ); } } PermissionsCheckboxHandler( XulCheckbox readCheckbox, XulCheckbox writeCheckbox, XulCheckbox deleteCheckbox, XulCheckbox manageCheckbox ); EnumSet<RepositoryFilePermission> processCheckboxes(); EnumSet<RepositoryFilePermission> processCheckboxes( boolean enableAppropriate ); void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ); void updateCheckboxes( boolean enableAppropriate, EnumSet<RepositoryFilePermission> permissionEnumSet ); void setAllChecked( boolean value ); void setAllDisabled( boolean value ); }
@Test public void testSetAllDisabledDisablesAll() { boolean disabled = true; permissionsCheckboxHandler.setAllDisabled( disabled ); verify( readCheckbox, times( 1 ) ).setDisabled( disabled ); verify( writeCheckbox, times( 1 ) ).setDisabled( disabled ); verify( deleteCheckbox, times( 1 ) ).setDisabled( disabled ); verify( manageCheckbox, times( 1 ) ).setDisabled( disabled ); } @Test public void testSetAllEnabledEnablesAll() { boolean disabled = false; permissionsCheckboxHandler.setAllDisabled( disabled ); verify( readCheckbox, times( 1 ) ).setDisabled( disabled ); verify( writeCheckbox, times( 1 ) ).setDisabled( disabled ); verify( deleteCheckbox, times( 1 ) ).setDisabled( disabled ); verify( manageCheckbox, times( 1 ) ).setDisabled( disabled ); }
PermissionsCheckboxHandler { public EnumSet<RepositoryFilePermission> processCheckboxes() { return processCheckboxes( false ); } PermissionsCheckboxHandler( XulCheckbox readCheckbox, XulCheckbox writeCheckbox, XulCheckbox deleteCheckbox, XulCheckbox manageCheckbox ); EnumSet<RepositoryFilePermission> processCheckboxes(); EnumSet<RepositoryFilePermission> processCheckboxes( boolean enableAppropriate ); void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ); void updateCheckboxes( boolean enableAppropriate, EnumSet<RepositoryFilePermission> permissionEnumSet ); void setAllChecked( boolean value ); void setAllDisabled( boolean value ); }
// Nothing checked, enableAppropriate=true: empty permission set; only READ re-enabled.
@Test public void testProcessCheckboxesNoneCheckedEnableAppropriateTrue() {
  when( readCheckbox.isChecked() ).thenReturn( false );
  when( writeCheckbox.isChecked() ).thenReturn( false );
  when( deleteCheckbox.isChecked() ).thenReturn( false );
  when( manageCheckbox.isChecked() ).thenReturn( false );
  assertEquals( EnumSet.noneOf( RepositoryFilePermission.class ),
    permissionsCheckboxHandler.processCheckboxes( true ) );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
  verify( readCheckbox, times( 1 ) ).setDisabled( false );
}

// Nothing checked, no-arg overload: empty set; nothing is ever re-enabled.
@Test public void testProcessCheckboxesNoneCheckedEnableAppropriateFalse() {
  when( readCheckbox.isChecked() ).thenReturn( false );
  when( writeCheckbox.isChecked() ).thenReturn( false );
  when( deleteCheckbox.isChecked() ).thenReturn( false );
  when( manageCheckbox.isChecked() ).thenReturn( false );
  assertEquals( EnumSet.noneOf( RepositoryFilePermission.class ),
    permissionsCheckboxHandler.processCheckboxes() );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
  verify( readCheckbox, never() ).setDisabled( false );
}

// READ checked: {READ}; the next tier (write) becomes editable.
@Test public void testProcessCheckboxesReadCheckedEnableAppropriateTrue() {
  when( readCheckbox.isChecked() ).thenReturn( true );
  when( writeCheckbox.isChecked() ).thenReturn( false );
  when( deleteCheckbox.isChecked() ).thenReturn( false );
  when( manageCheckbox.isChecked() ).thenReturn( false );
  assertEquals( EnumSet.of( RepositoryFilePermission.READ ),
    permissionsCheckboxHandler.processCheckboxes( true ) );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( false );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// READ checked, no-arg overload: {READ}; everything stays disabled.
@Test public void testProcessCheckboxesReadCheckedEnableAppropriateFalse() {
  when( readCheckbox.isChecked() ).thenReturn( true );
  when( writeCheckbox.isChecked() ).thenReturn( false );
  when( deleteCheckbox.isChecked() ).thenReturn( false );
  when( manageCheckbox.isChecked() ).thenReturn( false );
  assertEquals( EnumSet.of( RepositoryFilePermission.READ ),
    permissionsCheckboxHandler.processCheckboxes() );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// WRITE checked implies READ: {READ, WRITE}; write/delete become editable.
@Test public void testProcessCheckboxesWriteCheckedEnableAppropriateTrue() {
  when( readCheckbox.isChecked() ).thenReturn( false );
  when( writeCheckbox.isChecked() ).thenReturn( true );
  when( deleteCheckbox.isChecked() ).thenReturn( false );
  when( manageCheckbox.isChecked() ).thenReturn( false );
  assertEquals( EnumSet.of( RepositoryFilePermission.READ, RepositoryFilePermission.WRITE ),
    permissionsCheckboxHandler.processCheckboxes( true ) );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( false );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( false );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// WRITE checked, no-arg overload: {READ, WRITE}; everything stays disabled.
@Test public void testProcessCheckboxesWriteCheckedEnableAppropriateFalse() {
  when( readCheckbox.isChecked() ).thenReturn( false );
  when( writeCheckbox.isChecked() ).thenReturn( true );
  when( deleteCheckbox.isChecked() ).thenReturn( false );
  when( manageCheckbox.isChecked() ).thenReturn( false );
  assertEquals( EnumSet.of( RepositoryFilePermission.READ, RepositoryFilePermission.WRITE ),
    permissionsCheckboxHandler.processCheckboxes() );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// DELETE checked implies READ+WRITE: {READ, WRITE, DELETE}; delete/manage editable.
@Test public void testProcessCheckboxesDeleteCheckedEnableAppropriateTrue() {
  when( readCheckbox.isChecked() ).thenReturn( false );
  when( writeCheckbox.isChecked() ).thenReturn( false );
  when( deleteCheckbox.isChecked() ).thenReturn( true );
  when( manageCheckbox.isChecked() ).thenReturn( false );
  assertEquals( EnumSet.of( RepositoryFilePermission.READ, RepositoryFilePermission.WRITE,
    RepositoryFilePermission.DELETE ), permissionsCheckboxHandler.processCheckboxes( true ) );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( false );
  verify( manageCheckbox, times( 1 ) ).setDisabled( false );
}

// DELETE checked, no-arg overload: {READ, WRITE, DELETE}; everything stays disabled.
@Test public void testProcessCheckboxesDeleteCheckedEnableAppropriateFalse() {
  when( readCheckbox.isChecked() ).thenReturn( false );
  when( writeCheckbox.isChecked() ).thenReturn( false );
  when( deleteCheckbox.isChecked() ).thenReturn( true );
  when( manageCheckbox.isChecked() ).thenReturn( false );
  assertEquals( EnumSet.of( RepositoryFilePermission.READ, RepositoryFilePermission.WRITE,
    RepositoryFilePermission.DELETE ), permissionsCheckboxHandler.processCheckboxes() );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// MANAGE checked implies all lower tiers; only manage remains editable.
@Test public void testProcessCheckboxesManageCheckedEnableAppropriateTrue() {
  when( readCheckbox.isChecked() ).thenReturn( false );
  when( writeCheckbox.isChecked() ).thenReturn( false );
  when( deleteCheckbox.isChecked() ).thenReturn( false );
  when( manageCheckbox.isChecked() ).thenReturn( true );
  assertEquals( EnumSet.of( RepositoryFilePermission.READ, RepositoryFilePermission.WRITE,
    RepositoryFilePermission.DELETE, RepositoryFilePermission.ACL_MANAGEMENT ), permissionsCheckboxHandler
    .processCheckboxes( true ) );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( false );
}

// MANAGE checked, no-arg overload: full set; everything stays disabled.
@Test public void testProcessCheckboxesManageCheckedEnableAppropriateFalse() {
  when( readCheckbox.isChecked() ).thenReturn( false );
  when( writeCheckbox.isChecked() ).thenReturn( false );
  when( deleteCheckbox.isChecked() ).thenReturn( false );
  when( manageCheckbox.isChecked() ).thenReturn( true );
  assertEquals( EnumSet.of( RepositoryFilePermission.READ, RepositoryFilePermission.WRITE,
    RepositoryFilePermission.DELETE, RepositoryFilePermission.ACL_MANAGEMENT ), permissionsCheckboxHandler
    .processCheckboxes() );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}
RepositoriesMeta {
  /**
   * Loads repository definitions from the first repositories XML file found:
   * the Kettle local file, falling back to the user file; if neither exists,
   * returns true with nothing read (absence is not an error). If the chosen file
   * disappears between the existence check and parsing, falls back to the default
   * repositories.xml bundled on the classpath.
   *
   * @return always {@code true}; failure is reported via KettleException instead
   * @throws KettleException if parsing fails or neither the file nor the bundled
   *         fallback resource can be opened
   */
  public boolean readData() throws KettleException {
    clear();
    File file = new File( getKettleLocalRepositoriesFile() );
    if ( !file.exists() || !file.isFile() ) {
      if ( log.isDetailed() ) {
        log.logDetailed( BaseMessages.getString( PKG, "RepositoryMeta.Log.NoRepositoryFileInLocalDirectory", file
          .getAbsolutePath() ) );
      }
      file = new File( getKettleUserRepositoriesFile() );
      if ( !file.exists() || !file.isFile() ) {
        // No repositories file anywhere — nothing to read, but not an error.
        return true;
      }
    }
    if ( log.isBasic() ) {
      log.logBasic( BaseMessages.getString( PKG, "RepositoryMeta.Log.ReadingXMLFile", file.getAbsoluteFile() ) );
    }
    try {
      DocumentBuilderFactory dbf = XMLParserFactoryProducer.createSecureDocBuilderFactory();
      DocumentBuilder db = dbf.newDocumentBuilder();
      Document doc;
      try {
        doc = db.parse( file );
      } catch ( FileNotFoundException ef ) {
        // File vanished after the existence check: fall back to the bundled default.
        // try-with-resources closes the classpath stream (previously leaked).
        try ( InputStream is = getClass().getResourceAsStream( "/org/pentaho/di/repository/repositories.xml" ) ) {
          if ( is != null ) {
            doc = db.parse( is );
          } else {
            throw new KettleException( BaseMessages.getString( PKG, "RepositoryMeta.Error.OpeningFile", file
              .getAbsoluteFile() ), ef );
          }
        }
      }
      parseRepositoriesDoc( doc );
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString( PKG, "RepositoryMeta.Error.ReadingInfo" ), e );
    }
    return true;
  }
  RepositoriesMeta();
  void clear();
  void addDatabase( DatabaseMeta ci );
  void addRepository( RepositoryMeta ri );
  void addDatabase( int p, DatabaseMeta ci );
  void addRepository( int p, RepositoryMeta ri );
  DatabaseMeta getDatabase( int i );
  RepositoryMeta getRepository( int i );
  void removeDatabase( int i );
  void removeRepository( int i );
  int nrDatabases();
  int nrRepositories();
  DatabaseMeta searchDatabase( String name );
  RepositoryMeta searchRepository( String name );
  int indexOfDatabase( DatabaseMeta di );
  int indexOfRepository( RepositoryMeta ri );
  RepositoryMeta findRepository( String name );
  RepositoryMeta findRepositoryById( String id );
  boolean readData();
  void readDataFromInputStream( InputStream is );
  String getXML();
  void writeData();
  String toString();
  RepositoriesMeta clone();
  String getErrorMessage();
  LogChannelInterface getLog();
}
// End-to-end exercise of RepositoriesMeta: read the fixture, round-trip via getXML(),
// then walk the repository/database accessor, search, index, remove and re-add APIs.
@Test public void testReadData() throws Exception {
  LogChannel log = mock( LogChannel.class );
  // Point the user repositories file at the test fixture next to this class.
  when( repoMeta.getKettleUserRepositoriesFile() ).thenReturn( getClass().getResource( "repositories.xml" ).getPath() );
  when( repoMeta.newLogChannel() ).thenReturn( log );
  repoMeta.readData();
  // Expected serialized form of the fixture (one connection + one repository).
  String repositoriesXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + Const.CR
    + "<repositories>" + Const.CR
    + " <connection>" + Const.CR
    + " <name>local postgres</name>" + Const.CR
    + " <server>localhost</server>" + Const.CR
    + " <type>POSTGRESQL</type>" + Const.CR
    + " <access>Native</access>" + Const.CR
    + " <database>hibernate</database>" + Const.CR
    + " <port>5432</port>" + Const.CR
    + " <username>auser</username>" + Const.CR
    + " <password>Encrypted 2be98afc86aa7f285bb18bd63c99dbdde</password>" + Const.CR
    + " <servername/>" + Const.CR
    + " <data_tablespace/>" + Const.CR
    + " <index_tablespace/>" + Const.CR
    + " <attributes>" + Const.CR
    + " <attribute><code>FORCE_IDENTIFIERS_TO_LOWERCASE</code><attribute>N</attribute></attribute>" + Const.CR
    + " <attribute><code>FORCE_IDENTIFIERS_TO_UPPERCASE</code><attribute>N</attribute></attribute>" + Const.CR
    + " <attribute><code>IS_CLUSTERED</code><attribute>N</attribute></attribute>" + Const.CR
    + " <attribute><code>PORT_NUMBER</code><attribute>5432</attribute></attribute>" + Const.CR
    + " <attribute><code>PRESERVE_RESERVED_WORD_CASE</code><attribute>N</attribute></attribute>" + Const.CR
    + " <attribute><code>QUOTE_ALL_FIELDS</code><attribute>N</attribute></attribute>" + Const.CR
    + " <attribute><code>SUPPORTS_BOOLEAN_DATA_TYPE</code><attribute>Y</attribute></attribute>" + Const.CR
    + " <attribute><code>SUPPORTS_TIMESTAMP_DATA_TYPE</code><attribute>Y</attribute></attribute>" + Const.CR
    + " <attribute><code>USE_POOLING</code><attribute>N</attribute></attribute>" + Const.CR
    + " </attributes>" + Const.CR
    + " </connection>" + Const.CR
    + " <repository> <id>KettleFileRepository</id>" + Const.CR
    + " <name>Test Repository</name>" + Const.CR
    + " <description>Test Repository Description</description>" + Const.CR
    + " <is_default>false</is_default>" + Const.CR
    + " <base_directory>test-repository</base_directory>" + Const.CR
    + " <read_only>N</read_only>" + Const.CR
    + " <hides_hidden_files>N</hides_hidden_files>" + Const.CR
    + " </repository> </repositories>" + Const.CR;
  assertEquals( repositoriesXml, repoMeta.getXML() );
  RepositoriesMeta clone = repoMeta.clone();
  // NOTE(review): this re-asserts repoMeta.getXML(); presumably clone.getXML() was
  // intended to prove the clone serializes identically — confirm and fix if so.
  assertEquals( repositoriesXml, repoMeta.getXML() );
  assertNotSame( clone, repoMeta );
  assertEquals( 1, repoMeta.nrRepositories() );
  RepositoryMeta repository = repoMeta.getRepository( 0 );
  assertEquals( "Test Repository", repository.getName() );
  assertEquals( "Test Repository Description", repository.getDescription() );
  assertEquals( " <repository> <id>KettleFileRepository</id>" + Const.CR
    + " <name>Test Repository</name>" + Const.CR
    + " <description>Test Repository Description</description>" + Const.CR
    + " <is_default>false</is_default>" + Const.CR
    + " <base_directory>test-repository</base_directory>" + Const.CR
    + " <read_only>N</read_only>" + Const.CR
    + " <hides_hidden_files>N</hides_hidden_files>" + Const.CR
    + " </repository>", repository.getXML() );
  // All three lookup styles must return the very same instance.
  assertSame( repository, repoMeta.searchRepository( "Test Repository" ) );
  assertSame( repository, repoMeta.findRepositoryById( "KettleFileRepository" ) );
  assertSame( repository, repoMeta.findRepository( "Test Repository" ) );
  assertNull( repoMeta.findRepository( "not found" ) );
  assertNull( repoMeta.findRepositoryById( "not found" ) );
  assertEquals( 0, repoMeta.indexOfRepository( repository ) );
  repoMeta.removeRepository( 0 );
  assertEquals( 0, repoMeta.nrRepositories() );
  assertNull( repoMeta.searchRepository( "Test Repository" ) );
  repoMeta.addRepository( 0, repository );
  assertEquals( 1, repoMeta.nrRepositories() );
  // Out-of-range remove is a no-op: the count stays at 1.
  repoMeta.removeRepository( 1 );
  assertEquals( 1, repoMeta.nrRepositories() );
  assertEquals( 1, repoMeta.nrDatabases() );
  assertEquals( "local postgres", repoMeta.getDatabase( 0 ).getName() );
  DatabaseMeta searchDatabase = repoMeta.searchDatabase( "local postgres" );
  assertSame( searchDatabase, repoMeta.getDatabase( 0 ) );
  assertEquals( 0, repoMeta.indexOfDatabase( searchDatabase ) );
  repoMeta.removeDatabase( 0 );
  assertEquals( 0, repoMeta.nrDatabases() );
  assertNull( repoMeta.searchDatabase( "local postgres" ) );
  repoMeta.addDatabase( 0, searchDatabase );
  assertEquals( 1, repoMeta.nrDatabases() );
  // Out-of-range remove is a no-op here too.
  repoMeta.removeDatabase( 1 );
  assertEquals( 1, repoMeta.nrDatabases() );
  // The fixture contains an unreadable repository id "junk"; readData records the error.
  assertEquals( "Unable to read repository with id [junk]. RepositoryMeta is not available.", repoMeta
    .getErrorMessage() );
}
DataHandler extends AbstractXulEventHandler { public void editOptions( int index ) { if ( index + 1 == optionsParameterTree.getRows() ) { Object[][] values = optionsParameterTree.getValues(); Object[] row = values[values.length - 1]; if ( row != null && ( !StringUtils.isEmpty( (String) row[0] ) || !StringUtils.isEmpty( (String) row[1] ) ) ) { XulTreeRow newRow = optionsParameterTree.getRootChildren().addNewRow(); newRow.addCellText( 0, "" ); newRow.addCellText( 1, "" ); } } } DataHandler(); void loadConnectionData(); void loadAccessData(); void editOptions( int index ); void clearOptionsData(); void getOptionHelp(); void setDeckChildIndex(); void onPoolingCheck(); void onClusterCheck(); Object getData(); void setData( Object data ); void pushCache(); void popCache(); void onCancel(); void onOK(); void testDatabaseConnection(); void restoreDefaults(); void poolingRowChange( int idx ); void disablePortIfInstancePopulated(); void handleUseSecurityCheckbox(); static final SortedMap<String, DatabaseInterface> connectionMap; static final Map<String, String> connectionNametoID; }
@Test public void testEditOptions() throws Exception { }
PermissionsCheckboxHandler { public void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ) { updateCheckboxes( false, permissionEnumSet ); } PermissionsCheckboxHandler( XulCheckbox readCheckbox, XulCheckbox writeCheckbox, XulCheckbox deleteCheckbox, XulCheckbox manageCheckbox ); EnumSet<RepositoryFilePermission> processCheckboxes(); EnumSet<RepositoryFilePermission> processCheckboxes( boolean enableAppropriate ); void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ); void updateCheckboxes( boolean enableAppropriate, EnumSet<RepositoryFilePermission> permissionEnumSet ); void setAllChecked( boolean value ); void setAllDisabled( boolean value ); }
// No permissions, enableAppropriate=true: all unchecked/disabled, then READ re-enabled.
@Test public void testUpdateCheckboxesNoPermissionsAppropriateTrue() {
  permissionsCheckboxHandler.updateCheckboxes( true, EnumSet.noneOf( RepositoryFilePermission.class ) );
  verify( readCheckbox, times( 1 ) ).setChecked( false );
  verify( writeCheckbox, times( 1 ) ).setChecked( false );
  verify( deleteCheckbox, times( 1 ) ).setChecked( false );
  verify( manageCheckbox, times( 1 ) ).setChecked( false );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
  verify( readCheckbox, times( 1 ) ).setDisabled( false );
}

// No permissions, enableAppropriate=false: all unchecked/disabled, nothing re-enabled.
@Test public void testUpdateCheckboxesNoPermissionsAppropriateFalse() {
  permissionsCheckboxHandler.updateCheckboxes( false, EnumSet.noneOf( RepositoryFilePermission.class ) );
  verify( readCheckbox, times( 1 ) ).setChecked( false );
  verify( writeCheckbox, times( 1 ) ).setChecked( false );
  verify( deleteCheckbox, times( 1 ) ).setChecked( false );
  verify( manageCheckbox, times( 1 ) ).setChecked( false );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
  verify( readCheckbox, never() ).setDisabled( false );
}

// {READ}: read checked; next tier (write) becomes editable.
@Test public void testUpdateCheckboxesReadPermissionsAppropriateTrue() {
  permissionsCheckboxHandler.updateCheckboxes( true, EnumSet.of( RepositoryFilePermission.READ ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( false );
  verify( deleteCheckbox, times( 1 ) ).setChecked( false );
  verify( manageCheckbox, times( 1 ) ).setChecked( false );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( false );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// {READ}, enableAppropriate=false: read checked; everything stays disabled.
@Test public void testUpdateCheckboxesReadPermissionsAppropriateFalse() {
  permissionsCheckboxHandler.updateCheckboxes( false, EnumSet.of( RepositoryFilePermission.READ ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( false );
  verify( deleteCheckbox, times( 1 ) ).setChecked( false );
  verify( manageCheckbox, times( 1 ) ).setChecked( false );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// {WRITE, READ}: both checked; write/delete editable.
@Test public void testUpdateCheckboxesWritePermissionsAppropriateTrue() {
  permissionsCheckboxHandler.updateCheckboxes( true,
    EnumSet.of( RepositoryFilePermission.WRITE, RepositoryFilePermission.READ ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( true );
  verify( deleteCheckbox, times( 1 ) ).setChecked( false );
  verify( manageCheckbox, times( 1 ) ).setChecked( false );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( false );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( false );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// {WRITE, READ}, enableAppropriate=false: both checked; everything stays disabled.
@Test public void testUpdateCheckboxesWritePermissionsAppropriateFalse() {
  permissionsCheckboxHandler.updateCheckboxes( false,
    EnumSet.of( RepositoryFilePermission.WRITE, RepositoryFilePermission.READ ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( true );
  verify( deleteCheckbox, times( 1 ) ).setChecked( false );
  verify( manageCheckbox, times( 1 ) ).setChecked( false );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// {DELETE, WRITE, READ}: three checked; delete/manage editable.
@Test public void testUpdateCheckboxesDeletePermissionsAppropriateTrue() {
  permissionsCheckboxHandler.updateCheckboxes( true,
    EnumSet.of( RepositoryFilePermission.DELETE, RepositoryFilePermission.WRITE, RepositoryFilePermission.READ ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( true );
  verify( deleteCheckbox, times( 1 ) ).setChecked( true );
  verify( manageCheckbox, times( 1 ) ).setChecked( false );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( false );
  verify( manageCheckbox, times( 1 ) ).setDisabled( false );
}

// {DELETE, WRITE, READ}, enableAppropriate=false: everything stays disabled.
@Test public void testUpdateCheckboxesDeletePermissionsAppropriateFalse() {
  permissionsCheckboxHandler.updateCheckboxes( false,
    EnumSet.of( RepositoryFilePermission.DELETE, RepositoryFilePermission.WRITE, RepositoryFilePermission.READ ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( true );
  verify( deleteCheckbox, times( 1 ) ).setChecked( true );
  verify( manageCheckbox, times( 1 ) ).setChecked( false );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// Full explicit set: all checked; only manage remains editable.
@Test public void testUpdateCheckboxesManagePermissionsAppropriateTrue() {
  permissionsCheckboxHandler.updateCheckboxes( true,
    EnumSet.of( RepositoryFilePermission.ACL_MANAGEMENT, RepositoryFilePermission.DELETE,
      RepositoryFilePermission.WRITE, RepositoryFilePermission.READ ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( true );
  verify( deleteCheckbox, times( 1 ) ).setChecked( true );
  verify( manageCheckbox, times( 1 ) ).setChecked( true );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( false );
}

// Full explicit set, enableAppropriate=false: everything stays disabled.
@Test public void testUpdateCheckboxesManagePermissionsAppropriateFalse() {
  permissionsCheckboxHandler.updateCheckboxes( false,
    EnumSet.of( RepositoryFilePermission.ACL_MANAGEMENT, RepositoryFilePermission.DELETE,
      RepositoryFilePermission.WRITE, RepositoryFilePermission.READ ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( true );
  verify( deleteCheckbox, times( 1 ) ).setChecked( true );
  verify( manageCheckbox, times( 1 ) ).setChecked( true );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}

// {ALL} expands to every checkbox checked; only manage remains editable.
@Test public void testUpdateCheckboxesAllPermissionsAppropriateTrue() {
  permissionsCheckboxHandler.updateCheckboxes( true, EnumSet.of( RepositoryFilePermission.ALL ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( true );
  verify( deleteCheckbox, times( 1 ) ).setChecked( true );
  verify( manageCheckbox, times( 1 ) ).setChecked( true );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
  verify( manageCheckbox, times( 1 ) ).setDisabled( false );
}

// {ALL}, enableAppropriate=false: all checked.
// NOTE(review): the source chunk is truncated here — this test's remaining
// verifications are outside the visible file and are reproduced only as far as given.
@Test public void testUpdateCheckboxesAllPermissionsAppropriateFalse() {
  permissionsCheckboxHandler.updateCheckboxes( false, EnumSet.of( RepositoryFilePermission.ALL ) );
  verify( readCheckbox, times( 1 ) ).setChecked( true );
  verify( writeCheckbox, times( 1 ) ).setChecked( true );
  verify( deleteCheckbox, times( 1 ) ).setChecked( true );
  verify( manageCheckbox, times( 1 ) ).setChecked( true );
  verify( readCheckbox, times( 1 ) ).setDisabled( true );
  verify( writeCheckbox, times( 1 ) ).setDisabled( true );
  verify(
deleteCheckbox, times( 1 ) ).setDisabled( true ); verify( manageCheckbox, times( 1 ) ).setDisabled( true ); }
RepositoriesMeta { public void readDataFromInputStream( InputStream is ) throws KettleException { clear(); if ( log.isBasic() ) { log.logBasic( BaseMessages.getString( PKG, "RepositoryMeta.Log.ReadingXMLFile", "FromInputStream" ) ); } try { DocumentBuilderFactory dbf = XMLParserFactoryProducer.createSecureDocBuilderFactory(); DocumentBuilder db = dbf.newDocumentBuilder(); Document doc = db.parse( is ); parseRepositoriesDoc( doc ); } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "RepositoryMeta.Error.ReadingInfo" ), e ); } } RepositoriesMeta(); void clear(); void addDatabase( DatabaseMeta ci ); void addRepository( RepositoryMeta ri ); void addDatabase( int p, DatabaseMeta ci ); void addRepository( int p, RepositoryMeta ri ); DatabaseMeta getDatabase( int i ); RepositoryMeta getRepository( int i ); void removeDatabase( int i ); void removeRepository( int i ); int nrDatabases(); int nrRepositories(); DatabaseMeta searchDatabase( String name ); RepositoryMeta searchRepository( String name ); int indexOfDatabase( DatabaseMeta di ); int indexOfRepository( RepositoryMeta ri ); RepositoryMeta findRepository( String name ); RepositoryMeta findRepositoryById( String id ); boolean readData(); void readDataFromInputStream( InputStream is ); String getXML(); void writeData(); String toString(); RepositoriesMeta clone(); String getErrorMessage(); LogChannelInterface getLog(); }
@Test public void testReadDataFromInputStream() throws Exception { InputStream inputStream = getClass().getResourceAsStream( "repositories.xml" ); repoMeta.readDataFromInputStream( inputStream ); assertEquals( 1, repoMeta.nrDatabases() ); assertEquals( 1, repoMeta.nrRepositories() ); } @Test public void testErrorReadingInputStream() throws Exception { try { repoMeta.readDataFromInputStream( getClass().getResourceAsStream( "filedoesnotexist.xml" ) ); } catch ( KettleException e ) { assertEquals( Const.CR + "Error reading information from file:" + Const.CR + "InputStream cannot be null" + Const.CR, e.getMessage() ); } } @Test( expected = KettleException.class ) public void exceptionThrownWhenParsingXmlWithBigAmountOfExternalEntitiesFromInputStream() throws Exception { repoMeta.readDataFromInputStream( new ByteArrayInputStream( XXEUtils.MALICIOUS_XML.getBytes() ) ); }
StreamToTransNodeConverter implements Converter { public void convertPostRepoSave( RepositoryFile repositoryFile ) { if ( repositoryFile != null ) { try { Repository repo = connectToRepository(); if ( repo != null ) { TransMeta transMeta = repo.loadTransformation( new StringObjectId( repositoryFile.getId().toString() ), null ); ExtensionPointHandler.callExtensionPoint( new LogChannel( this ), KettleExtensionPoint.TransImportAfterSaveToRepo.id, transMeta ); } } catch ( Exception e ) { logger.error( KettleExtensionPoint.TransImportAfterSaveToRepo.id, e ); } } } StreamToTransNodeConverter( IUnifiedRepository unifiedRepository ); InputStream convert( final IRepositoryFileData data ); InputStream convert( final Serializable fileId ); IRepositoryFileData convert( final InputStream inputStream, final String charset, final String mimeType ); void convertPostRepoSave( RepositoryFile repositoryFile ); }
@Test public void convertPostRepoSave() throws Exception { StreamToTransNodeConverter converter = mock( StreamToTransNodeConverter.class ); doCallRealMethod().when( converter ).convertPostRepoSave( any( RepositoryFile.class ) ); Repository repository = mock( Repository.class ); when( converter.connectToRepository() ).thenReturn( repository ); TransMeta transMeta = mock( TransMeta.class ); when( repository.loadTransformation( any(), anyString() ) ).thenReturn( transMeta ); RepositoryFile file = mock( RepositoryFile.class ); when( file.getId() ).thenReturn( "fileId" ); PluginMockInterface pluginInterface = mock( PluginMockInterface.class ); when( pluginInterface.getName() ).thenReturn( KettleExtensionPoint.TransImportAfterSaveToRepo.id ); when( pluginInterface.getMainType() ).thenReturn( (Class) ExtensionPointInterface.class ); when( pluginInterface.getIds() ).thenReturn( new String[] {"extensionpointId"} ); ExtensionPointInterface extensionPoint = mock( ExtensionPointInterface.class ); when( pluginInterface.loadClass( ExtensionPointInterface.class ) ).thenReturn( extensionPoint ); PluginRegistry.addPluginType( ExtensionPointPluginType.getInstance() ); PluginRegistry.getInstance().registerPlugin( ExtensionPointPluginType.class, pluginInterface ); converter.convertPostRepoSave( file ); verify( extensionPoint, times( 1 ) ).callExtensionPoint( any( LogChannelInterface.class ), same( transMeta ) ); }
PDIImportUtil { public static Repository connectToRepository( String repositoryName ) throws KettleException { return repositoryFactory.connect( repositoryName ); } static Repository connectToRepository( String repositoryName ); static void setRepositoryFactory( IRepositoryFactory factory ); static Document loadXMLFrom( String xml ); static Document loadXMLFrom( InputStream is ); static String asXml( Document document ); }
@Test public void testConnectToRepository() throws Exception { IRepositoryFactory mock = mock( IRepositoryFactory.class ); PDIImportUtil.setRepositoryFactory( mock ); PDIImportUtil.connectToRepository( "foo" ); verify( mock, times( 1 ) ).connect( "foo" ); }
PDIImportUtil { public static Document loadXMLFrom( String xml ) throws SAXException, IOException { return loadXMLFrom( new ByteArrayInputStream( xml.getBytes() ) ); } static Repository connectToRepository( String repositoryName ); static void setRepositoryFactory( IRepositoryFactory factory ); static Document loadXMLFrom( String xml ); static Document loadXMLFrom( InputStream is ); static String asXml( Document document ); }
@Test( timeout = 2000 ) public void whenLoadingMaliciousXmlFromStringParsingEndsWithNoErrorAndNullValueIsReturned() throws Exception { assertNull( PDIImportUtil.loadXMLFrom( MALICIOUS_XML ) ); } @Test( timeout = 2000 ) public void whenLoadingMaliciousXmlFromInputStreamParsingEndsWithNoErrorAndNullValueIsReturned() throws Exception { assertNull( PDIImportUtil.loadXMLFrom( MALICIOUS_XML ) ); } @Test public void whenLoadingLegalXmlFromStringNotNullDocumentIsReturned() throws Exception { final String trans = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<transformation>" + "</transformation>"; assertNotNull( PDIImportUtil.loadXMLFrom( trans ) ); }
RepositorySyncWebService implements IRepositorySyncWebService, Serializable { public RepositorySyncStatus sync( String repositoryId, String repositoryUrl ) throws RepositorySyncException { boolean singleDiServerInstance = "true".equals( PentahoSystem.getSystemSetting( SINGLE_DI_SERVER_INSTANCE, "true" ) ); if ( singleDiServerInstance ) { return RepositorySyncStatus.SINGLE_DI_SERVER_INSTANCE; } RepositoriesMeta repositoriesMeta = new RepositoriesMeta(); try { repositoriesMeta.readData(); } catch ( Exception e ) { log.error( Messages.getInstance().getString( "RepositorySyncWebService.UNABLE_TO_READ_DATA" ), e ); throw new RepositorySyncException( Messages.getInstance().getString( "RepositorySyncWebService.UNABLE_TO_READ_DATA" ), e ); } RepositoryMeta repositoryMeta = repositoriesMeta.findRepository( repositoryId ); if ( repositoryMeta == null ) { try { repositoryMeta = getRepositoryMeta( repositoryId, repositoryUrl ); if ( repositoryMeta == null ) { log.error( Messages.getInstance().getString( "RepositorySyncWebService.UNABLE_TO_LOAD_PLUGIN" ) ); throw new RepositorySyncException( Messages.getInstance().getString( "RepositorySyncWebService.UNABLE_TO_LOAD_PLUGIN" ) ); } repositoriesMeta.addRepository( repositoryMeta ); repositoriesMeta.writeData(); return RepositorySyncStatus.REGISTERED; } catch ( KettleException e ) { log.error( Messages.getInstance().getString( "RepositorySyncWebService.UNABLE_TO_REGISTER_REPOSITORY", repositoryId ), e ); throw new RepositorySyncException( Messages.getInstance().getString( "RepositorySyncWebService.UNABLE_TO_REGISTER_REPOSITORY", repositoryId ), e ); } } else { String xml = repositoryMeta.getXML(); Element node; try { node = XMLParserFactoryProducer.createSecureDocBuilderFactory().newDocumentBuilder().parse( new StringBufferInputStream( xml ) ) .getDocumentElement(); } catch ( Exception e ) { node = null; } if ( node != null ) { NodeList list = node.getElementsByTagName( "repository_location_url" ); if ( list != null && 
list.getLength() == 1 ) { String url = list.item( 0 ).getTextContent(); if ( url.equals( repositoryUrl ) ) { String fullyQualifiedServerUrl = null; if ( PentahoSystem.getApplicationContext().getFullyQualifiedServerURL() != null ) { fullyQualifiedServerUrl = PentahoSystem.getApplicationContext().getFullyQualifiedServerURL(); if ( url.endsWith( "/" ) ) { url = url.substring( 0, url.length() - 2 ); } if ( fullyQualifiedServerUrl.endsWith( "/" ) ) { fullyQualifiedServerUrl = fullyQualifiedServerUrl.substring( 0, fullyQualifiedServerUrl.length() - 2 ); } if ( url.startsWith( fullyQualifiedServerUrl ) ) { return RepositorySyncStatus.ALREADY_REGISTERED; } } log.error( Messages.getInstance().getString( "RepositorySyncWebService.FULLY_QUALIFIED_SERVER_URL_SYNC_PROBLEM", fullyQualifiedServerUrl, url ) ); throw new RepositorySyncException( Messages.getInstance().getString( "RepositorySyncWebService.FULLY_QUALIFIED_SERVER_URL_SYNC_PROBLEM", fullyQualifiedServerUrl, url ) ); } else { log.error( Messages.getInstance().getString( "RepositorySyncWebService.REPOSITORY_URL_SYNC_PROBLEM", repositoryId, url, repositoryUrl ) ); throw new RepositorySyncException( Messages.getInstance().getString( "RepositorySyncWebService.REPOSITORY_URL_SYNC_PROBLEM", repositoryId, url, repositoryUrl ) ); } } } log.error( Messages.getInstance().getString( "RepositorySyncWebService.REPOSITORY_URL_XML_PARSING_PROBLEM", repositoryId, xml ) ); throw new RepositorySyncException( Messages.getInstance().getString( "RepositorySyncWebService.REPOSITORY_URL_XML_PARSING_PROBLEM_CLIENT_MESSAGE", repositoryId ) ); } } RepositorySyncStatus sync( String repositoryId, String repositoryUrl ); }
@Test public void testSyncWebService() throws Exception { KettleEnvironment.init( false ); BasePropertyHandler.getInstance().notify( new TestPropertyHandler() ); File f = new File( Const.getKettleDirectory() ); f.mkdirs(); PentahoSystem.registerObjectFactory( new SimpleObjectFactory() ); PentahoSystem.init( new TestAppContext(), null ); PentahoSystem.setSystemSettingsService( new ISystemSettings() { public String getSystemCfgSourceName() { return null; } public String getSystemSetting( String arg0, String arg1 ) { if ( "singleDiServerInstance".equals( arg0 ) ) { return "false"; } return arg1; } public String getSystemSetting( String arg0, String arg1, String arg2 ) { return null; } public List getSystemSettings( String arg0 ) { return null; } public List getSystemSettings( String arg0, String arg1 ) { return null; } public Document getSystemSettingsDocument( String arg0 ) { return null; } public Properties getSystemSettingsProperties( String arg0 ) { return null; } public void resetSettingsCache() { } } ); IRepositorySyncWebService webservice = getRepositorySyncWebService(); try { webservice.sync( "test id", "http: Assert.fail(); } catch ( RepositorySyncException e ) { Assert.assertTrue( e.getMessage().indexOf( "unable to load the PentahoEnterpriseRepository plugin" ) >= 0 ); } RepositoryPluginType.getInstance().registerCustom( TestRepositoryMeta.class, "PentahoEnterpriseRepository", "PentahoEnterpriseRepository", "PentahoEnterpriseRepository", "PentahoEnterpriseRepository", "" ); PluginRegistry.getInstance().getPlugin( RepositoryPluginType.class, "PentahoEnterpriseRepository" ).getClassMap() .put( RepositoryMeta.class, "com.pentaho.pdi.ws.RepositorySyncWebServiceTest$TestRepositoryMeta" ); RepositorySyncStatus status = webservice.sync( "test id", "http: Assert.assertEquals( RepositorySyncStatus.REGISTERED, status ); status = webservice.sync( "test id", "http: Assert.assertEquals( RepositorySyncStatus.ALREADY_REGISTERED, status ); try { webservice.sync( "test id", 
"http: Assert.fail(); } catch ( RepositorySyncException e ) { Assert.assertTrue( e.getMessage().indexOf( "with the URL:" ) >= 0 ); } fullyQualifiedServerUrl = "http: try { webservice.sync( "test id", "http: Assert.fail(); } catch ( RepositorySyncException e ) { Assert.assertTrue( e.getMessage().indexOf( "fully qualified server url" ) >= 0 ); } }
Messages extends MessagesBase { private Messages() { super( BUNDLE_NAME ); } private Messages(); static Messages getInstance(); }
@Test public void testMessages() { assertEquals( "Wrong message returned", "test message 1", Messages.getInstance().getString( "test.MESSAGE1" ) ); assertEquals( "Wrong message returned", "test message 2: A", Messages.getInstance().getString( "test.MESSAGE2", "A" ) ); assertEquals( "Wrong message returned", "test message 3: A B", Messages.getInstance().getString( "test.MESSAGE3", "A", "B" ) ); assertEquals( "Wrong message returned", "test message 4: A B C", Messages.getInstance().getString( "test.MESSAGE4", "A", "B", "C" ) ); assertEquals( "Wrong message returned", "test message 5: A B C D", Messages.getInstance().getString( "test.MESSAGE5", "A", "B", "C", "D" ) ); }
Messages extends MessagesBase { public static Messages getInstance() { return instance; } private Messages(); static Messages getInstance(); }
@Test public void testErrorMessages() { assertEquals( "Wrong message returned", "test.ERROR_0001 - test error 1", Messages.getInstance().getErrorString( "test.ERROR_0001_TEST_ERROR1" ) ); assertEquals( "Wrong message returned", "test.ERROR_0002 - test error 2: A", Messages.getInstance().getErrorString( "test.ERROR_0002_TEST_ERROR2", "A" ) ); assertEquals( "Wrong message returned", "test.ERROR_0003 - test error 3: A B", Messages.getInstance() .getErrorString( "test.ERROR_0003_TEST_ERROR3", "A", "B" ) ); assertEquals( "Wrong message returned", "test.ERROR_0004 - test error 4: A B C", Messages.getInstance() .getErrorString( "test.ERROR_0004_TEST_ERROR4", "A", "B", "C" ) ); } @Test public void testBadKey() { assertEquals( "Wrong message returned", "!bogus key!", Messages.getInstance().getString( "bogus key" ) ); assertEquals( "Wrong message returned", "test.ERROR_0001 - !test.ERROR_0001_BOGUS!", Messages.getInstance().getErrorString( "test.ERROR_0001_BOGUS" ) ); } @Test public void testEncoding() { assertEquals( "Wrong message returned", "", Messages.getInstance().getEncodedString( null ) ); assertEquals( "Wrong message returned", "test: &#x81; &#x99;", Messages.getInstance().getXslString( "test.encode1" ) ); } @Test public void test() { try { Constructor<Messages> constructor = Messages.class.getDeclaredConstructor(); assertTrue( Modifier.isPrivate( constructor.getModifiers() ) ); } catch ( Exception e ) { fail( Messages.class.getSimpleName() + " Does not have a private constructor " ); } assertNotNull( Messages.getInstance() ); } @Test public void testGetInstance() { Messages instance = Messages.getInstance(); assertNotNull( instance ); } @Test public void testGetInstance() throws Exception { assertNotNull( Messages.getInstance() ); }