id
stringlengths 22
25
| commit_message
stringlengths 137
6.96k
| diffs
listlengths 0
63
|
|---|---|---|
derby-DERBY-4398-108305bc
|
DERBY-4398 Allow OFFSET/FETCH in subqueries
Patch derby-4398-3, which implements this feature, and also adds test
cases for this to the now renamed test
OrderByAndOffsetFetchInSubqueries and OffsetFetchNextTest.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@897934 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/CreateViewNode.java",
"hunks": [
{
"added": [
" private ValueNode offset;",
" private ValueNode fetchFirst;"
],
"header": "@@ -69,6 +69,8 @@ public class CreateViewNode extends DDLStatementNode",
"removed": []
},
{
"added": [
" * @param offset OFFSET if any, or null",
" * @param fetchFirst FETCH FIRST if any, or null"
],
"header": "@@ -81,6 +83,8 @@ public class CreateViewNode extends DDLStatementNode",
"removed": []
},
{
"added": [
" Object orderCols,",
" Object offset,",
" Object fetchFirst)"
],
"header": "@@ -90,7 +94,9 @@ public class CreateViewNode extends DDLStatementNode",
"removed": [
"\t\t\t\t Object orderCols)"
]
},
{
"added": [
" this.offset = (ValueNode)offset;",
" this.fetchFirst = (ValueNode)fetchFirst;"
],
"header": "@@ -99,6 +105,8 @@ public class CreateViewNode extends DDLStatementNode",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/CursorNode.java",
"hunks": [
{
"added": [
" bindOffsetFetch(offset, fetchFirst);"
],
"header": "@@ -280,7 +280,7 @@ public class CursorNode extends DMLStatementNode",
"removed": [
"\t\tbindOffsetFetch();"
]
},
{
"added": [],
"header": "@@ -364,46 +364,6 @@ public class CursorNode extends DMLStatementNode",
"removed": [
"\tprivate void bindOffsetFetch() throws StandardException {",
"",
"\t\tif (offset instanceof ConstantNode) {",
"\t\t\tDataValueDescriptor dvd = ((ConstantNode)offset).getValue();",
"\t\t\tlong val = dvd.getLong();",
"",
"\t\t\tif (val < 0) {",
"\t\t\t\tthrow StandardException.newException(",
"\t\t\t\t\tSQLState.LANG_INVALID_ROW_COUNT_OFFSET,",
"\t\t\t\t\tLong.toString(val) );",
"\t\t\t}",
"\t\t} else if (offset instanceof ParameterNode) {",
"\t\t\toffset.",
"\t\t\t\tsetType(new DataTypeDescriptor(",
"\t\t\t\t\t\t\tTypeId.getBuiltInTypeId(Types.BIGINT),",
"\t\t\t\t\t\t\tfalse /* ignored tho; ends up nullable,",
"\t\t\t\t\t\t\t\t\t so we test for NULL at execute time */));",
"\t\t}",
"",
"",
"\t\tif (fetchFirst instanceof ConstantNode) {",
"\t\t\tDataValueDescriptor dvd = ((ConstantNode)fetchFirst).getValue();",
"\t\t\tlong val = dvd.getLong();",
"",
"\t\t\tif (val < 1) {",
"\t\t\t\tthrow StandardException.newException(",
"\t\t\t\t\tSQLState.LANG_INVALID_ROW_COUNT_FIRST,",
"\t\t\t\t\tLong.toString(val) );",
"\t\t\t}",
"\t\t} else if (fetchFirst instanceof ParameterNode) {",
"\t\t\tfetchFirst.",
"\t\t\t\tsetType(new DataTypeDescriptor(",
"\t\t\t\t\t\t\tTypeId.getBuiltInTypeId(Types.BIGINT),",
"\t\t\t\t\t\t\tfalse /* ignored tho; ends up nullable,",
"\t\t\t\t\t\t\t\t\t so we test for NULL at execute time*/));",
"\t\t}",
"\t}",
"",
"",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/DMLStatementNode.java",
"hunks": [
{
"added": [],
"header": "@@ -298,22 +298,6 @@ abstract class DMLStatementNode extends StatementNode",
"removed": [
"\t{",
"\t\toptimizeStatement(null, null);",
"\t}",
"",
"\t/**",
"\t * This overload variant of optimizeStatement is used by subclass",
"\t * CursorNode (as well as a minion for the no-arg variant).",
"\t *",
"\t * @param offset Any OFFSET row count, or null",
"\t * @param fetchFirst Any FETCH FIRST row count or null",
"\t *",
"\t * @exception StandardException\t\tThrown on error",
"\t * @see DMLStatementNode#optimizeStatement()",
"\t */",
"\tprotected void optimizeStatement(ValueNode offset, ValueNode fetchFirst)",
"\t\t\tthrows StandardException"
]
},
{
"added": [],
"header": "@@ -332,25 +316,6 @@ abstract class DMLStatementNode extends StatementNode",
"removed": [
"\t\t// Any OFFSET/FETCH FIRST narrowing must be done *after* any rewrite of",
"\t\t// the query tree (if not, underlying GROUP BY fails), but *before* the",
"\t\t// final scroll insensitive result node set is added - that one needs",
"\t\t// to sit on top - so now is the time.",
"\t\t// ",
"\t\t// This example statement fails if we wrap *before* the optimization",
"\t\t// above:",
"\t\t// select max(a) from t1 group by b fetch first row only",
"\t\t//",
"\t\t// A java.sql.ResultSet#previous on a scrollable result set will fail",
"\t\t// if we don't wrap *after* the ScrollInsensitiveResultSetNode below.",
"\t\t//",
"\t\t// We need only wrap the RowCountNode set if at least one of the",
"\t\t// clauses is present.",
"\t\t",
"\t\tif (offset != null || fetchFirst != null) {",
"\t\t\tresultSet = wrapRowCountNode(resultSet, offset, fetchFirst);",
"\t\t}",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/FromBaseTable.java",
"hunks": [
{
"added": [],
"header": "@@ -2243,8 +2243,6 @@ public class FromBaseTable extends FromTable",
"removed": [
"\t\t\t\tOrderByList orderByList = cvn.getOrderByList();",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/FromSubquery.java",
"hunks": [
{
"added": [
" private ValueNode offset;",
" private ValueNode fetchFirst;"
],
"header": "@@ -47,6 +47,8 @@ public class FromSubquery extends FromTable",
"removed": []
},
{
"added": [
" * @param offset OFFSET if any, or null",
" * @param fetchFirst FETCH FIRST if any, or null"
],
"header": "@@ -59,6 +61,8 @@ public class FromSubquery extends FromTable",
"removed": []
},
{
"added": [
" Object offset,",
" Object fetchFirst,"
],
"header": "@@ -66,6 +70,8 @@ public class FromSubquery extends FromTable",
"removed": []
},
{
"added": [
" this.offset = (ValueNode)offset;",
" this.fetchFirst = (ValueNode)fetchFirst;"
],
"header": "@@ -73,6 +79,8 @@ public class FromSubquery extends FromTable",
"removed": []
},
{
"added": [
" if (orderByList != null)",
" {",
" printLabel(depth, \"orderByList: \");",
" orderByList.treePrint(depth + 1);",
" }",
"",
" if (offset != null)",
" {",
" printLabel(depth, \"offset: \");",
" offset.treePrint(depth + 1);",
" }",
"",
" if (fetchFirst != null)",
" {",
" printLabel(depth, \"fetchFirst: \");",
" fetchFirst.treePrint(depth + 1);",
" }",
" }"
],
"header": "@@ -94,12 +102,24 @@ public class FromSubquery extends FromTable",
"removed": [
"\t\t\tif (orderByList != null)",
"\t\t\t{",
"\t\t\t\tprintLabel(depth, \"orderByList: \");",
"\t\t\t\torderByList.treePrint(depth + 1);",
"\t\t\t}",
"\t\t}"
]
},
{
"added": [
" bindOffsetFetch(offset, fetchFirst);",
""
],
"header": "@@ -236,6 +256,8 @@ public class FromSubquery extends FromTable",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/InsertNode.java",
"hunks": [
{
"added": [
" private ValueNode offset;",
" private ValueNode fetchFirst;",
""
],
"header": "@@ -111,7 +111,9 @@ public final class InsertNode extends DMLModStatementNode",
"removed": [
"\t"
]
},
{
"added": [
" Object orderByList,",
" Object offset,",
" Object fetchFirst)"
],
"header": "@@ -134,7 +136,9 @@ public final class InsertNode extends DMLModStatementNode",
"removed": [
" Object orderByList)"
]
},
{
"added": [
" this.offset = (ValueNode)offset;",
" this.fetchFirst = (ValueNode)fetchFirst;"
],
"header": "@@ -149,6 +153,8 @@ public final class InsertNode extends DMLModStatementNode",
"removed": []
},
{
"added": [
" bindOffsetFetch(offset, fetchFirst);",
""
],
"header": "@@ -443,6 +449,8 @@ public final class InsertNode extends DMLModStatementNode",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ResultSetNode.java",
"hunks": [
{
"added": [
"",
" /**",
" * Push down the offset and fetch first parameters, if any. This method",
" * should be overridden by the result sets that need this.",
" *",
" * @param offset the OFFSET, if any",
" * @param fetchFirst the OFFSET FIRST, if any",
" */",
" void pushOffsetFetchFirst(ValueNode offset, ValueNode fetchFirst)",
" {",
" if (SanityManager.DEBUG)",
" {",
" SanityManager.THROWASSERT(",
" \"pushOffsetFetchFirst() not expected to be called for \" +",
" getClass().getName());",
" }",
" }",
""
],
"header": "@@ -1686,6 +1686,24 @@ public abstract class ResultSetNode extends QueryTreeNode",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/RowResultSetNode.java",
"hunks": [
{
"added": [
" ValueNode offset; // OFFSET n ROWS",
" ValueNode fetchFirst; // FETCH FIRST n ROWS ONLY"
],
"header": "@@ -61,6 +61,8 @@ public class RowResultSetNode extends FromTable",
"removed": []
},
{
"added": [
" /**",
" * Push down the offset and fetch first parameters, if any, to this node.",
" *",
" * @param offset the OFFSET, if any",
" * @param fetchFirst the OFFSET FIRST, if any",
" */",
" void pushOffsetFetchFirst(ValueNode offset, ValueNode fetchFirst)",
" {",
" this.offset = offset;",
" this.fetchFirst = fetchFirst;",
" }",
"",
"",
" /**"
],
"header": "@@ -370,7 +372,20 @@ public class RowResultSetNode extends FromTable",
"removed": [
"\t/** "
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/SelectNode.java",
"hunks": [
{
"added": [
" ValueNode offset; // OFFSET n ROWS, if given",
" ValueNode fetchFirst; // FETCH FIRST n ROWS ONLY, if given",
"",
""
],
"header": "@@ -107,6 +107,10 @@ public class SelectNode extends ResultSetNode",
"removed": []
},
{
"added": [
" /**",
" * Push down the offset and fetch first parameters to this node.",
" *",
" * @param offset the OFFSET, if any",
" * @param fetchFirst the OFFSET FIRST, if any",
" */",
" void pushOffsetFetchFirst(ValueNode offset, ValueNode fetchFirst)",
" {",
" this.offset = offset;",
" this.fetchFirst = fetchFirst;",
" }",
"",
"",
" /**"
],
"header": "@@ -916,7 +920,20 @@ public class SelectNode extends ResultSetNode",
"removed": [
"\t/** "
]
},
{
"added": [
" /* Don't flatten if selectNode has OFFSET or FETCH */",
" if ((offset != null) ||",
" (fetchFirst != null))",
" {",
" return false;",
" }",
""
],
"header": "@@ -1393,6 +1410,13 @@ public class SelectNode extends ResultSetNode",
"removed": []
},
{
"added": [
""
],
"header": "@@ -1573,7 +1597,7 @@ public class SelectNode extends ResultSetNode",
"removed": [
"\t\t\t"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/SetOperatorNode.java",
"hunks": [
{
"added": [
" ValueNode offset; // OFFSET n ROWS",
" ValueNode fetchFirst; // FETCH FIRST n ROWS ONLY"
],
"header": "@@ -61,7 +61,8 @@ abstract class SetOperatorNode extends TableOperatorNode",
"removed": [
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/SubqueryNode.java",
"hunks": [
{
"added": [
" private ValueNode offset;",
" private ValueNode fetchFirst;"
],
"header": "@@ -167,6 +167,8 @@ public class SubqueryNode extends ValueNode",
"removed": []
},
{
"added": [
" * @param offset OFFSET n ROWS",
" * @param fetchFirst FETCH FIRST n ROWS ONLY",
" Object orderCols,",
" Object offset,",
" Object fetchFirst)",
" this.offset = (ValueNode)offset;",
" this.fetchFirst = (ValueNode)fetchFirst;"
],
"header": "@@ -204,17 +206,23 @@ public class SubqueryNode extends ValueNode",
"removed": [
"\t\t\t\t\t Object orderCols)"
]
},
{
"added": [
"",
" if (offset != null)",
" {",
" printLabel(depth, \"offset: \");",
" offset.treePrint(depth + 1);",
" }",
"",
" if (fetchFirst != null)",
" {",
" printLabel(depth, \"fetchFirst: \");",
" fetchFirst.treePrint(depth + 1);",
" }"
],
"header": "@@ -280,6 +288,18 @@ public class SubqueryNode extends ValueNode",
"removed": []
},
{
"added": [
" bindOffsetFetch(offset, fetchFirst);",
""
],
"header": "@@ -528,6 +548,8 @@ public class SubqueryNode extends ValueNode",
"removed": []
},
{
"added": [
" orderByList == null &&",
" offset == null &&",
" fetchFirst == null &&"
],
"header": "@@ -644,7 +666,9 @@ public class SubqueryNode extends ValueNode",
"removed": [
"\t\t\t orderByList == null &&"
]
},
{
"added": [
" orderByList == null &&",
" offset == null &&",
" fetchFirst == null &&"
],
"header": "@@ -714,7 +738,9 @@ public class SubqueryNode extends ValueNode",
"removed": [
"\t\t\t orderByList == null &&"
]
},
{
"added": [
" resultSet.pushOffsetFetchFirst(offset, fetchFirst);",
""
],
"header": "@@ -823,6 +849,8 @@ public class SubqueryNode extends ValueNode",
"removed": []
}
]
}
] |
derby-DERBY-44-fc6efec2
|
DERBY-1014: Make tests less sensitive to pre-fetching
Attached a patch (DERBY-1014-v1.diff) which modifies tests in
preparation for DERBY-822. There are two types of changes:
1) Invoke ResultSet.next() to expose error even when the driver
isn't pre-fetching data.
2) Filter out some of the run-time statistics before comparing with
the canon.
lang/wisconsin.java:
* Added patterns to the sed file. Cursor names and some of the
run-time statistics are filtered out.
* Canons for DerbyNet and DerbyNetClient were deleted since they are
no longer needed. Each of these files is 1.5M, so the diff is
huge.
lang/scrollCursors1.sql:
* Added patterns to the sed file. Number of rows seen and number of
reads from hash table will be filtered out.
jdbcapi/parameterMetaDataJdbc30.java:
* Added a call to ResultSet.next() to provoke an error in a test
case for DERBY-44 regardless of framework/pre-fetching.
jdbcapi/resultset.java:
* Don't print column headers until one row is successfully retrieved
(achieved by calling ResultSet.next() earlier).
jdbcapi/setTransactionIsolation.java:
* Added a call to ResultSet.next() to provoke a timeout regardless
of framework/pre-fetching.
* Added patterns to the sed file. Filtered out number of pages
visited, number of rows qualified/visited etc.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@387214 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4402-97e996cf
|
DERBY-4402
Group by should not allow aggregates in it. We were able to detect "froup by (sum(j))" but not "group by (4+sum(j))". That's because we were not looking deep enough inside the group by column.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@881074 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4405-7ff4f83f
|
DERBY-4405 Transformation to inner join not performed for certain three-way joins
Patch derby-4405-2, which extends the rewriting optimization of outer
join to inner join in the presence of a compound inner table which
itself a join. The existing analysis of null intolerant predicates,
which is a condition for the rewrite, failed to look into inner tables
that were not a simple base table.
The patch adds new test cases to verify that rewrite now takes place
for such cases.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@891015 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/HalfOuterJoinNode.java",
"hunks": [
{
"added": [],
"header": "@@ -566,7 +566,6 @@ public class HalfOuterJoinNode extends JoinNode",
"removed": [
"\t\tJBitSet innerMap = new JBitSet(numTables);"
]
}
]
},
{
"file": "java/testing/org/apache/derbyTesting/junit/RuntimeStatisticsParser.java",
"hunks": [
{
"added": [
" /**",
" * @return true if a nested loop left outer join was used",
" */",
" public boolean usedNLLeftOuterJoin()",
" {",
" return (statistics.indexOf(\"Nested Loop Left Outer Join\") != -1);",
" }",
""
],
"header": "@@ -309,6 +309,14 @@ public class RuntimeStatisticsParser {",
"removed": []
}
]
}
] |
derby-DERBY-4407-fe2341f8
|
DERBY-4407: StringIndexOutOfBoundsException in ij when result has no columns
Raise a compile-time error if a query returns no columns.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@826616 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-441-93ab8302
|
DERBY-441 - Progress on clean up of published javadoc.
- Remove some unused methods in client data sources.
- Add warnigns for connectionAttributes
- remove attributesAsPassword property for EmebeddedSimpleDataSource
git-svn-id: https://svn.apache.org/repos/asf/incubator/derby/code/trunk@209473 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/client/org/apache/derby/jdbc/ClientBaseDataSource.java",
"hunks": [
{
"added": [
" ClientBaseDataSource() {"
],
"header": "@@ -60,7 +60,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected ClientBaseDataSource() {"
]
},
{
"added": [
" private transient PrintWriter logWriter;"
],
"header": "@@ -91,7 +91,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected transient PrintWriter logWriter = null;"
]
},
{
"added": [
" protected String databaseName;"
],
"header": "@@ -108,7 +108,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String databaseName = null;"
]
},
{
"added": [
" protected String description;"
],
"header": "@@ -116,7 +116,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String description = null;"
]
},
{
"added": [
" protected String dataSourceName;"
],
"header": "@@ -125,7 +125,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String dataSourceName = null;"
]
},
{
"added": [],
"header": "@@ -165,10 +165,6 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static int NOT_SET = 0; // 0 means not set.",
" public final static int YES = 1; // =\"yes\" as property string",
" public final static int NO = 2; // =\"no\" as property string",
""
]
},
{
"added": [
" protected String traceFile;"
],
"header": "@@ -238,7 +234,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String traceFile = null;"
]
},
{
"added": [
" protected String traceDirectory;"
],
"header": "@@ -249,7 +245,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String traceDirectory = null;"
]
},
{
"added": [
" protected String password;",
" synchronized public final void setPassword(String password) {",
" ",
" public final String getPassword() {",
" \treturn password;",
" }"
],
"header": "@@ -278,11 +274,15 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String password = null;",
" synchronized public void setPassword(String password) {"
]
},
{
"added": [
" \t"
],
"header": "@@ -358,6 +358,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": []
},
{
"added": [
" LogWriter computeDncLogWriterForNewConnection(String logWriterInUseSuffix) throws SqlException {",
" static LogWriter computeDncLogWriterForNewConnection(PrintWriter logWriter, String traceDirectory, String traceFile, boolean traceFileAppend, int traceLevel, String logWriterInUseSuffix, int traceFileSuffixIndex) throws SqlException {"
],
"header": "@@ -495,14 +496,14 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public LogWriter computeDncLogWriterForNewConnection(String logWriterInUseSuffix) throws SqlException {",
" static public LogWriter computeDncLogWriterForNewConnection(PrintWriter logWriter, String traceDirectory, String traceFile, boolean traceFileAppend, int traceLevel, String logWriterInUseSuffix, int traceFileSuffixIndex) throws SqlException {"
]
},
{
"added": [
" static Properties tokenizeAttributes(String attributeString, Properties properties) throws SqlException {"
],
"header": "@@ -618,7 +619,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public static Properties tokenizeAttributes(String attributeString, Properties properties) throws SqlException {"
]
},
{
"added": [
" * <BR>",
" * Any attributes that can be set using a property of this DataSource implementation",
" * (e.g user, password) should not be set in connectionAttributes. Conflicting",
" * settings in connectionAttributes and properties of the DataSource will lead to",
" * unexpected behaviour. "
],
"header": "@@ -783,6 +784,11 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": []
}
]
},
{
"file": "java/client/org/apache/derby/jdbc/ClientConnectionPoolDataSource.java",
"hunks": [
{
"added": [
" PooledConnection pooledConnection = getPooledConnectionX(dncLogWriter, this, getUser(), getPassword());"
],
"header": "@@ -47,7 +47,7 @@ public class ClientConnectionPoolDataSource extends ClientDataSource implements",
"removed": [
" PooledConnection pooledConnection = getPooledConnectionX(dncLogWriter, this, user, password);"
]
}
]
},
{
"file": "java/client/org/apache/derby/jdbc/ClientDataSource.java",
"hunks": [
{
"added": [
" return getConnection(getUser(), getPassword());"
],
"header": "@@ -132,7 +132,7 @@ public class ClientDataSource extends ClientBaseDataSource implements DataSource",
"removed": [
" return getConnection(user, password);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/jdbc/EmbeddedDataSource.java",
"hunks": [
{
"added": [
"\t<BR>",
" Any attributes that can be set using a property of this DataSource implementation",
" (e.g user, password) should not be set in connectionAttributes. Conflicting",
" settings in connectionAttributes and properties of the DataSource will lead to",
" unexpected behaviour. "
],
"header": "@@ -109,6 +109,11 @@ import org.apache.derby.impl.jdbc.Util;",
"removed": []
},
{
"added": [
"\ttransient InternalDriver driver;"
],
"header": "@@ -240,7 +245,7 @@ public class EmbeddedDataSource extends ReferenceableDataSource implements",
"removed": [
"\ttransient protected InternalDriver driver;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/jdbc/EmbeddedSimpleDataSource.java",
"hunks": [
{
"added": [],
"header": "@@ -104,11 +104,6 @@ public class EmbeddedSimpleDataSource implements DataSource {",
"removed": [
"\t/**",
"\t * Set password to be a set of connection attributes.",
"\t */",
"\tprivate boolean attributesAsPassword;",
""
]
},
{
"added": [
"\ttransient private InternalDriver driver;"
],
"header": "@@ -116,7 +111,7 @@ public class EmbeddedSimpleDataSource implements DataSource {",
"removed": [
"\ttransient protected InternalDriver driver;"
]
},
{
"added": [
"\t<BR>",
" Any attributes that can be set using a property of this DataSource implementation",
" (e.g user, password) should not be set in connectionAttributes. Conflicting",
" settings in connectionAttributes and properties of the DataSource will lead to",
" unexpected behaviour. ",
" \t * "
],
"header": "@@ -333,7 +328,12 @@ public class EmbeddedSimpleDataSource implements DataSource {",
"removed": [
"\t * "
]
},
{
"added": [],
"header": "@@ -357,30 +357,6 @@ public class EmbeddedSimpleDataSource implements DataSource {",
"removed": [
"\t/**",
"\t * Set attributeAsPassword property to enable passing connection request",
"\t * attributes in the password argument of getConnection. If the property is",
"\t * set to true then the password argument of the",
"\t * DataSource.getConnection(String user, String password) method call is",
"\t * taken to be a list of connection attributes with the same format as the",
"\t * connectionAttributes property.",
"\t * ",
"\t * @param attributesAsPassword",
"\t * true to encode password argument as a set of connection",
"\t * attributes in a connection request.",
"\t */",
"\tpublic final void setAttributesAsPassword(boolean attributesAsPassword) {",
"\t\tthis.attributesAsPassword = attributesAsPassword;",
"\t\tupdate();",
"\t}",
"",
"\t/**",
"\t * Return the value of the attributesAsPassword property.",
"\t */",
"\tpublic final boolean getAttributesAsPassword() {",
"\t\treturn attributesAsPassword;",
"\t}",
""
]
},
{
"added": [
"\t\treturn this.getConnection(getUser(), getPassword());"
],
"header": "@@ -393,7 +369,7 @@ public class EmbeddedSimpleDataSource implements DataSource {",
"removed": [
"\t\treturn this.getConnection(getUser(), getPassword(), false);"
]
},
{
"added": [
"\t\tif (password != null)",
"\t\t\tinfo.put(Attribute.PASSWORD_ATTR, password);",
"\t\tConnection conn = findDriver().connect(jdbcurl, info);"
],
"header": "@@ -413,45 +389,20 @@ public class EmbeddedSimpleDataSource implements DataSource {",
"removed": [
"\t\treturn this.getConnection(username, password, true);",
"\t}",
"",
"\t/**",
"\t * @param requestPassword",
"\t * true if the password came from the getConnection() call.",
"\t */",
"\tfinal Connection getConnection(String username, String password,",
"\t\t\tboolean requestPassword) throws SQLException {",
"\t\tif (!requestPassword || !attributesAsPassword) {",
"\t\t\tif (password != null)",
"\t\t\t\tinfo.put(Attribute.PASSWORD_ATTR, password);",
"\t\t}",
"\t\tString url = jdbcurl;",
"",
"\t\tif (attributesAsPassword && requestPassword && password != null) {",
"",
"\t\t\tStringBuffer sb = new StringBuffer(url.length() + password.length()",
"\t\t\t\t\t+ 1);",
"",
"\t\t\tsb.append(url);",
"\t\t\tsb.append(';');",
"\t\t\tsb.append(password); // these are now request attributes on the URL",
"",
"\t\t\turl = sb.toString();",
"",
"\t\t}",
"\t\tConnection conn = findDriver().connect(url, info);"
]
}
]
}
] |
derby-DERBY-4412-e7e328d6
|
DERBY-4412: Make getNegation() abstract in BinaryComparisonOperatorNode and UnaryComparisonOperatorNode
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@827035 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/BinaryComparisonOperatorNode.java",
"hunks": [
{
"added": [],
"header": "@@ -33,8 +33,6 @@ import org.apache.derby.iapi.reference.SQLState;",
"removed": [
"import org.apache.derby.iapi.services.sanity.SanityManager;",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/UnaryComparisonOperatorNode.java",
"hunks": [
{
"added": [
"public abstract class UnaryComparisonOperatorNode extends UnaryOperatorNode"
],
"header": "@@ -53,7 +53,7 @@ import java.util.Vector;",
"removed": [
"public class UnaryComparisonOperatorNode extends UnaryOperatorNode"
]
},
{
"added": [
"\tabstract UnaryOperatorNode getNegation(ValueNode operand)",
"\t\t\t\tthrows StandardException;"
],
"header": "@@ -138,18 +138,8 @@ public class UnaryComparisonOperatorNode extends UnaryOperatorNode",
"removed": [
"\tUnaryOperatorNode getNegation(ValueNode operand)",
"\t\t\t\tthrows StandardException",
"\t{",
"\t\t/* Keep the compiler happy - this method should never be called.",
"\t\t * We should always be calling the method in a sub-class.",
"\t\t */",
"\t\tif (SanityManager.DEBUG)",
"\t\tSanityManager.ASSERT(false,",
"\t\t\t\t\t\"expected to call getNegation() for subclass \" +",
"\t\t\t\t\tgetClass().toString());",
"\t\treturn this;",
"\t}"
]
}
]
}
] |
derby-DERBY-4413-9d24308d
|
DERBY-4413 INSERT from SELECT DISTINCT gives assertFailure (sane), or NPE (insane) in presence of generated columns
Patch DERBY-4413-2 makes sort accept columns that are null. In sane mode, it is checked that in such a case, the
empty column is not part of the sort key.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@829410 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/store/access/sort/MergeSort.java",
"hunks": [
{
"added": [
"",
"\t/**",
" Determine whether a column is used for ordering or not.",
"\t**/",
"\tprivate boolean isOrderingColumn[];",
""
],
"header": "@@ -126,6 +126,12 @@ class MergeSort implements Sort",
"removed": []
},
{
"added": [
"\t\t\t\t\tif (!isOrderingColumn[colid]) {",
"",
"\t\t\t\t\t\t// DERBY-4413 shows that the value for a generated",
"\t\t\t\t\t\t// column will be null as the result set is computed as",
"\t\t\t\t\t\t// part of an INSERT INTO, so accept a null also.",
"\t\t\t\t\t\t// This column would not be part of the sort key.",
"",
"\t\t\t\t\t\tcontinue;",
"",
"\t\t\t\t\t} else {",
"\t\t\t\t\t\tSanityManager.THROWASSERT(\"col[\" + colid + \"] is null\");",
"\t\t\t\t\t}"
],
"header": "@@ -455,8 +461,18 @@ class MergeSort implements Sort",
"removed": [
"\t\t\t\t\tSanityManager.THROWASSERT(",
"\t\t\t\t\t\t\"col[\" + colid + \"] is null\");"
]
}
]
}
] |
derby-DERBY-4413-e0ba78cc
|
DERBY-4413 INSERT from SELECT DISTINCT gives assertFailure (sane), or NPE (insane) in presence of generated columns
After DERBY-4442 went in, the exception to the ASSERT check we made in
the original fix for this issue should be rolled back, so as to
provide a better internal consistency check. Patch derby-4413-rollback
does this.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@885659 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/store/access/sort/MergeSort.java",
"hunks": [
{
"added": [],
"header": "@@ -126,12 +126,6 @@ class MergeSort implements Sort",
"removed": [
"",
"\t/**",
" Determine whether a column is used for ordering or not.",
"\t**/",
"\tprivate boolean isOrderingColumn[];",
""
]
},
{
"added": [
"\t\t\t\t\tSanityManager.THROWASSERT(",
"\t\t\t\t\t\t\"col[\" + colid + \"] is null\");"
],
"header": "@@ -461,18 +455,8 @@ class MergeSort implements Sort",
"removed": [
"\t\t\t\t\tif (!isOrderingColumn[colid]) {",
"",
"\t\t\t\t\t\t// DERBY-4413 shows that the value for a generated",
"\t\t\t\t\t\t// column will be null as the result set is computed as",
"\t\t\t\t\t\t// part of an INSERT INTO, so accept a null also.",
"\t\t\t\t\t\t// This column would not be part of the sort key.",
"",
"\t\t\t\t\t\tcontinue;",
"",
"\t\t\t\t\t} else {",
"\t\t\t\t\t\tSanityManager.THROWASSERT(\"col[\" + colid + \"] is null\");",
"\t\t\t\t\t}"
]
},
{
"added": [
" for (int i = 0; i < columnOrdering.length; i++)"
],
"header": "@@ -562,27 +546,13 @@ class MergeSort implements Sort",
"removed": [
"",
"\t\tif (SanityManager.DEBUG) {",
"\t\t\tisOrderingColumn = new boolean[template.length];",
"",
"\t\t\tfor (int i = 0; i < isOrderingColumn.length; i++) {",
"\t\t\t\tisOrderingColumn[i] = false;",
"\t\t\t}",
"\t\t}",
"",
"\t\tfor (int i = 0; i < columnOrdering.length; i++)",
"",
"\t\t\tif (SanityManager.DEBUG) {",
"\t\t\t\tisOrderingColumn[columnOrderingMap[i]] = true;",
"\t\t\t}",
""
]
}
]
}
] |
derby-DERBY-4415-3ff5ce3d
|
DERBY-4415: Add ability to plug customized AST printers into the compiler.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@827760 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/sql/conn/LanguageConnectionContext.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.compile.ASTVisitor;"
],
"header": "@@ -32,6 +32,7 @@ import org.apache.derby.iapi.sql.dictionary.SchemaDescriptor;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/GenericStatement.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.compile.Visitable;",
"import org.apache.derby.iapi.sql.compile.ASTVisitor;"
],
"header": "@@ -33,6 +33,8 @@ import org.apache.derby.iapi.sql.PreparedStatement;",
"removed": []
},
{
"added": [
" // Call user-written tree-printer if it exists",
" walkAST( lcc, qt, ASTVisitor.AFTER_PARSE);",
""
],
"header": "@@ -281,6 +283,9 @@ public class GenericStatement",
"removed": []
},
{
"added": [
" // Call user-written tree-printer if it exists",
" walkAST( lcc, qt, ASTVisitor.AFTER_BIND);",
""
],
"header": "@@ -319,6 +324,9 @@ public class GenericStatement",
"removed": []
},
{
"added": [
" // Call user-written tree-printer if it exists",
" walkAST( lcc, qt, ASTVisitor.AFTER_OPTIMIZE);",
""
],
"header": "@@ -374,6 +382,9 @@ public class GenericStatement",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/conn/GenericLanguageConnectionContext.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.compile.ASTVisitor;"
],
"header": "@@ -41,6 +41,7 @@ import org.apache.derby.iapi.db.Database;",
"removed": []
},
{
"added": [
" // User-written inspector to print out query tree",
" private ASTVisitor astWalker;",
" "
],
"header": "@@ -290,6 +291,9 @@ public class GenericLanguageConnectionContext",
"removed": []
},
{
"added": [
" ",
" public void setASTVisitor( ASTVisitor visitor )",
" {",
" astWalker = visitor;",
" }",
" public ASTVisitor getASTVisitor( )",
" {",
" return astWalker;",
" }",
""
],
"header": "@@ -3537,4 +3541,14 @@ public class GenericLanguageConnectionContext",
"removed": []
}
]
}
] |
derby-DERBY-4419-c6172435
|
DERBY-4425: NPE with INSERT .. SELECT .. UNION and generated expressions
This patch adds a test which confirms that DERBY-4425 is fixed. The code
change which resolved DERBY-4425 was performed as part of fixing DERBY-4419,
since the same code change fixes both symptoms, so this is a test-only patch.
subversion revision 831304 contains the DERBY-4419 code change.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@831319 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4420-7a27cabe
|
DERBY-4420: NullPointerException with INSERT INTO ... from EXCEPT/INTERSECT
The failing code in ResultSetNode.setTableConstructorTypes() was meant
to handle the case where the node represented a table constructor (aka
VALUES clause). UnionNode already had an override to make it a no-op
unless it actually represented a multi-row VALUES clause that had been
rewritten to a union of single-row VALUES clauses.
Since a VALUES clause is never rewritten to EXCEPT or INTERSECT, the
correct handling is to make setTableConstructorTypes() a no-op in
IntersectOrExceptNode. Rather than adding an empty override in
IntersectOrExceptNode, the code was moved from
ResultSetNode.setTableConstructorTypes() to
RowResultSetNode.setTableConstructorTypes(), and the default
implementation in ResultSetNode was left empty.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@832379 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ResultSetNode.java",
"hunks": [
{
"added": [
"\t * Set the type of each parameter in the result column list if this node",
"\t * represents a table constructor (aka VALUES clause). Table constructors",
"\t * are represented either by a {@code RowResultSetNode} or by a",
"\t * {@code UnionNode} with multiple {@code RowResultSetNode} children and",
"\t * whose {@code tableConstructor()} method returns {@code true}. For all",
"\t * other nodes, this method should be a no-op."
],
"header": "@@ -304,7 +304,12 @@ public abstract class ResultSetNode extends QueryTreeNode",
"removed": [
"\t * Set the type of each parameter in the result column list for this table constructor."
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/RowResultSetNode.java",
"hunks": [
{
"added": [],
"header": "@@ -21,14 +21,9 @@",
"removed": [
"import org.apache.derby.iapi.services.context.ContextManager;",
"",
"import org.apache.derby.iapi.sql.compile.CompilerContext;",
"import org.apache.derby.iapi.sql.compile.OptimizableList;",
"import org.apache.derby.iapi.sql.compile.OptimizablePredicate;"
]
},
{
"added": [
"import org.apache.derby.iapi.types.DataTypeDescriptor;",
"import org.apache.derby.iapi.types.TypeId;",
""
],
"header": "@@ -38,25 +33,22 @@ import org.apache.derby.iapi.sql.dictionary.DataDictionary;",
"removed": [
"import org.apache.derby.iapi.sql.Activation;",
"import org.apache.derby.iapi.sql.ResultSet;",
"import org.apache.derby.iapi.sql.Row;",
"import org.apache.derby.impl.sql.compile.ActivationClassBuilder;",
"import java.util.Enumeration;",
"import java.util.Properties;"
]
}
]
}
] |
derby-DERBY-4421-dd2f5a82
|
DERBY-4421: Allow visitors to process the nodes bottom-up
Added the method visitChildrenFirst() to the Visitor interface to
allow the visitor to specify whether they should walk the tree
top-down or bottom-up. Implemented the method in all existing visitors
and made it return false to preserve the current behaviour with
top-down walking.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@830154 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/BinaryListOperatorNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -375,22 +375,16 @@ public abstract class BinaryListOperatorNode extends ValueNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tVisitable\t\treturnNode = v.visit(this);",
"",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn returnNode;",
"\t\t}"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/BinaryOperatorNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -853,22 +853,16 @@ public class BinaryOperatorNode extends ValueNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tVisitable returnNode = v.visit(this);",
"\t",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn returnNode;",
"\t\t}"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ConditionalNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -718,22 +718,16 @@ public class ConditionalNode extends ValueNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tVisitable returnNode = v.visit(this);",
"\t",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn returnNode;",
"\t\t}"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/FromBaseTable.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -4587,26 +4587,17 @@ public class FromBaseTable extends FromTable",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"",
"\t Visitable returnNode = super.accept(v);",
"",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn returnNode;",
"\t\t}",
"",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/HashTableNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -409,22 +409,16 @@ public class HashTableNode extends SingleChildResultSetNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn v.visit(this);",
"\t\t}",
"",
"\t\tVisitable returnNode = super.accept(v);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/JoinNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -1932,22 +1932,16 @@ public class JoinNode extends TableOperatorNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn v.visit(this);",
"\t\t}",
"",
"\t\tVisitable returnNode = super.accept(v);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/MethodCallNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -1239,22 +1239,16 @@ abstract class MethodCallNode extends JavaValueNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tVisitable\t\treturnNode = v.visit(this);",
"",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn returnNode;",
"\t\t}"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ProjectRestrictNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -1812,22 +1812,16 @@ public class ProjectRestrictNode extends SingleChildResultSetNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn v.visit(this);",
"\t\t}",
"",
"\t\tVisitable returnNode = super.accept(v);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/SelectNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -2344,27 +2344,16 @@ public class SelectNode extends ResultSetNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v)",
"\t\tVisitable returnNode = v.visit(this);",
"",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn returnNode;",
"\t\t}",
"",
"\t\tif (!v.stopTraversal())",
"\t\t{",
"\t\t\tsuper.accept(v);",
"\t\t}"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/SubqueryNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);",
"\t\t\treturn;"
],
"header": "@@ -2194,29 +2194,23 @@ public class SubqueryNode extends ValueNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v)",
"\t\tVisitable returnNode = v.visit(this);",
"\t\t\treturn returnNode;",
"\t\t}",
"\t",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn returnNode;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/TableOperatorNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -946,22 +946,16 @@ abstract class TableOperatorNode extends FromTable",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn v.visit(this);",
"\t\t}",
"",
"\t\tVisitable returnNode = super.accept(v);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/TernaryOperatorNode.java",
"hunks": [
{
"added": [
"\t * Accept the visitor for all visitable children of this node.",
"\tvoid acceptChildren(Visitor v)",
"\t\tsuper.acceptChildren(v);"
],
"header": "@@ -464,22 +464,16 @@ public class TernaryOperatorNode extends ValueNode",
"removed": [
"\t * Accept a visitor, and call v.visit()",
"\t * on child nodes as necessary. ",
"\tpublic Visitable accept(Visitor v) ",
"\t\tVisitable returnNode = v.visit(this);",
"\t",
"\t\tif (v.skipChildren(this))",
"\t\t{",
"\t\t\treturn returnNode;",
"\t\t}"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/VerifyAggregateExpressionsVisitor.java",
"hunks": [
{
"added": [
"",
"\tpublic boolean visitChildrenFirst(Visitable node)",
"\t{",
"\t\treturn false;",
"\t}"
],
"header": "@@ -145,4 +145,9 @@ public class VerifyAggregateExpressionsVisitor implements Visitor",
"removed": []
}
]
}
] |
derby-DERBY-4425-c6172435
|
DERBY-4425: NPE with INSERT .. SELECT .. UNION and generated expressions
This patch adds a test which confirms that DERBY-4425 is fixed. The code
change which resolved DERBY-4425 was performed as part of fixing DERBY-4419,
since the same code change fixes both symptoms, so this is a test-only patch.
subversion revision 831304 contains the DERBY-4419 code change.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@831319 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4426-c341dc44
|
DERBY-4426 With generated columns, INSERT with DEFAULT inside a VALUES clause inside a UNION fails
Patch derby-4426c, which fixes this issue. We ensure that DEFAULT is
only used in a top level VALUES clause, conformant with the standard.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@885595 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ResultColumnList.java",
"hunks": [
{
"added": [
"\t * Replace any DEFAULTs with the associated tree for the default if",
"\t * allowed, or flag.",
" * @param allowDefaults true if allowed",
"\tvoid replaceOrForbidDefaults(TableDescriptor ttd,",
" ResultColumnList tcl,",
" boolean allowDefaults)"
],
"header": "@@ -3864,14 +3864,18 @@ public class ResultColumnList extends QueryTreeNodeVector",
"removed": [
"\t * Replace any DEFAULTs with the associated tree for the default.",
"\tvoid replaceDefaults(TableDescriptor ttd, ResultColumnList tcl) "
]
}
]
}
] |
derby-DERBY-4428-18373c4f
|
DERBY-4428 (partial): Add proper delete mechanism for in-memory databases
Refactored code into getProtocolLeadIn()
Changed an ASSERT, as it expected equality between something that couldn't be
equal unless the default storage back end was used. It failed for the in-memory
back end, and would also fail for all other back ends. This code path hasn't
been enabled for other storage back ends before now (deleting the service root).
Patch file: derby-4428-3b-canonical_name_handling.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@884105 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/services/monitor/StorageFactoryService.java",
"hunks": [
{
"added": [
" return getProtocolLeadIn() + (String) AccessController.doPrivileged("
],
"header": "@@ -639,15 +639,7 @@ final class StorageFactoryService implements PersistentService",
"removed": [
"\t\t\tString protocolLeadIn = \"\";",
"\t\t\t//prepend the subsub protocol name to the storage factoty canonical",
"\t\t\t//name to form the service name except in case of the the ",
"\t\t\t//default subsubprototcol(PersistentService.DIRECTORY)",
"",
"\t\t\tif (!(getType().equals( PersistentService.DIRECTORY))) ",
"\t\t\t\tprotocolLeadIn = getType() + \":\";",
"",
" return protocolLeadIn + (String) AccessController.doPrivileged("
]
},
{
"added": [
" // Run this through getCanonicalServiceName as",
" // an extra sanity check. Prepending the",
" // protocol lead in to the canonical name from",
" // the storage factory should be enough.",
" String tmpCanonical = getCanonicalServiceName(",
" getProtocolLeadIn() +",
" storageFactory.getCanonicalName());",
" // These should give the same result.",
" SanityManager.ASSERT(",
" tmpCanonical.equals(getProtocolLeadIn()",
" + storageFactory.getCanonicalName()));",
" serviceName.equals(tmpCanonical),",
" \" ; protocolLeadIn + \" +",
" \"storageFactory.getCanoicalName = \" +",
" tmpCanonical);"
],
"header": "@@ -733,12 +725,23 @@ final class StorageFactoryService implements PersistentService",
"removed": [
" serviceName.equals(",
" storageFactory.getCanonicalName()), ",
" \";storageFactory.getCanonicalName() = \" + ",
" storageFactory.getCanonicalName());"
]
},
{
"added": [],
"header": "@@ -754,7 +757,6 @@ final class StorageFactoryService implements PersistentService",
"removed": [
"\t\tString protocolLeadIn = getType() + \":\";"
]
},
{
"added": [
" if( ! name.startsWith(getType() + \":\"))",
" return getProtocolLeadIn() + (String) AccessController.doPrivileged("
],
"header": "@@ -766,17 +768,15 @@ final class StorageFactoryService implements PersistentService",
"removed": [
" if( ! name.startsWith( protocolLeadIn))",
" if( getType().equals( PersistentService.DIRECTORY)) // The default subsubprototcol",
" protocolLeadIn = \"\";",
" return protocolLeadIn + (String) AccessController.doPrivileged("
]
},
{
"added": [
"",
" /**",
" * Returns the protocol lead in for this service.",
" *",
" * @return An empty string if the protocol is the default one",
" * (PersistentService.DIRECTORY), the subsub protocol name followed by",
" * colon otherwise.",
" */",
" private String getProtocolLeadIn() {",
" // We prepend the subsub protocol name to the storage factory canonical",
" // name to form the service name, except in case of the default",
" // subsub prototcol (which is PersistentService.DIRECTORY).",
" if (getType().equals(PersistentService.DIRECTORY)) {",
" return \"\";",
" } else {",
" return getType() + \":\";",
" }",
" }"
],
"header": "@@ -854,6 +854,24 @@ final class StorageFactoryService implements PersistentService",
"removed": []
}
]
}
] |
derby-DERBY-4428-88e75e5b
|
DERBY-4436: Refactor existing in-memory back end tests and add new tests for dropping database.
Added a new utility class to deal with in-memory databases (convenience methods, much like BaseJDBCTestCase and friends).
Adjusted existing tests to use MemoryDbManager.
Added new test DropWhileConnectingTest to study the behavior of Derby when several threads connect and/or execute queries when the database is being shut down. Written primarily to test the upcoming feature for blocking new connections while certain operations are taking place (for instance database shutdown and drop).
Note: Some features have been disabled in the committed code, awaiting the drop database functionality (DERBY-4428). Some additional changes may also be required.
Patch file: derby-4436-1b-inmem_test_changes.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@835334 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4428-b36288fd
|
DERBY-4428 (partial): Add proper delete mechanism for in-memory databases
Added the code required in the in-memory back end storage factory.
Some logic is required to remove the data store from the list of databases when
a database is dropped. Note that this logic isn't fully general purpose - it is
partly dependent on how the monitor / StorageFactoryService works.
Patch file: derby-4428-1b-in_memory_specific_delete_code.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@883297 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/io/VFMemoryStorageFactory.java",
"hunks": [
{
"added": [
" * <p>",
" * Note that data store deletion may happen inside one of two different methods;",
" * either in {@code shutdown} or in {@code init}. This is due to the current",
" * implementation and the fact that dropping a database is done through the",
" * file IO interface by deleting the service root. As the deletion then becomes",
" * a two step process, someone else may boot the database again before the",
" * reference to the store has been removed. To avoid this, the",
" * {@code init}-method will never initialize with a store scheduled for",
" * deletion. I have only seen this issue in heavily loaded multithreaded",
" * environments (2 CPUs/cores should be enough to reproduce)."
],
"header": "@@ -38,6 +38,16 @@ import org.apache.derby.io.WritableStorageFactory;",
"removed": []
},
{
"added": [
" * TODO: Remove this method once the new mechanism has been added."
],
"header": "@@ -54,6 +64,7 @@ public class VFMemoryStorageFactory",
"removed": []
},
{
"added": [
" this.dbData = (DataStore)DATABASES.get(canonicalName);",
" // If the store has been scheduled for deletion, purge it.",
" if (dbData != null && dbData.scheduledForDeletion()) {",
" DATABASES.remove(canonicalName);",
" dbData.purge();",
" dbDropCleanupInDummy(canonicalName);",
" dbData = null;",
" }",
" if (dbData == null) {",
" if (uniqueName != null) {",
" // Create a new data store.",
" this.dbData = new DataStore(canonicalName);",
" DATABASES.put(canonicalName, dbData);",
" } else {",
" // We have a database name, but no unique name.",
" // Assume that the client only wants to do some",
" // \"book-keeping\" operations, like getting the",
" // canonical name.",
" this.dbData = DUMMY_STORE;",
" }"
],
"header": "@@ -118,19 +129,26 @@ public class VFMemoryStorageFactory",
"removed": [
" if (DATABASES.containsKey(canonicalName)) {",
" // Fetch the existing data store.",
" this.dbData = (DataStore)DATABASES.get(canonicalName);",
" } else if (uniqueName != null) {",
" // Create a new data store.",
" this.dbData = new DataStore(canonicalName);",
" DATABASES.put(canonicalName, dbData);",
" } else {",
" // We have a database name, but no unique name.",
" // Assume that the client only wants to do some",
" // \"book-keeping\" operations, like getting the",
" // canonical name.",
" this.dbData = DUMMY_STORE;"
]
},
{
"added": [
" /**",
" * Normally does nothing, but if the database is in a state such that it",
" * should be deleted this will happen here.",
" */",
" // If the data store has been scheduled for deletion, which happens",
" // when the store detects that the service root has been deleted, then",
" // delete the whole store to release the memory.",
" if (dbData.scheduledForDeletion()) {",
" DataStore store;",
" synchronized (DATABASES) {",
" store = (DataStore)DATABASES.remove(canonicalName);",
" // Must clean up the dummy while holding monitor.",
" if (store != null && store == dbData) {",
" dbDropCleanupInDummy(canonicalName);",
" }",
" }",
" // If this is the correct store, purge it now.",
" if (store != null && store == dbData) {",
" dbData.purge(); // Idempotent.",
" dbData = null;",
" }",
" }"
],
"header": "@@ -161,11 +179,29 @@ public class VFMemoryStorageFactory",
"removed": [
" // For now, do nothing.",
" // TODO: Deleting stuff doesn't seem to play nice when running the",
" // regression tests, as CleanDatabaseTestSetup fails then. The cause",
" // is unknown and should be investigated."
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/io/vfmem/DataStore.java",
"hunks": [
{
"added": [
" /**",
" * The name of the database this store serves, expected to be the absolute",
" * path of the service root (i.e. /tmp/myDB if the database myDB is created",
" * in /tmp).",
" */",
" /** Tells if this store is scheduled for deletion. */",
" private boolean deleteMe;",
" * @param databaseName the name of the assoicated database, expected to be",
" * the absolute path of the service root.",
" * Returns the database name, which is expected to equal the path of the",
" * service root."
],
"header": "@@ -56,22 +56,30 @@ public final class DataStore {",
"removed": [
" /** The name of the database this store serves. */",
" * @param databaseName the name of the assoicated database",
" * Returns the database name."
]
},
{
"added": [
" /**",
" * Tells if this data store is scheduled for deletion.",
" *",
" * @return {@code true} if the store is awaiting deletion,",
" * {@code false} otherwise.",
" */",
" public boolean scheduledForDeletion() {",
" return this.deleteMe;",
" }",
""
],
"header": "@@ -79,6 +87,16 @@ public final class DataStore {",
"removed": []
},
{
"added": [
" // Check if we just deleted the service root. Normally the",
" // service would be deleted using deleteAll.",
" if (nPath.equals(databaseName) &&",
" files.get(databaseName) == null) {",
" // Service root deleted, mark this store for removal.",
" deleteMe = true;",
" }"
],
"header": "@@ -161,6 +179,13 @@ public final class DataStore {",
"removed": []
},
{
"added": [
" boolean deleted = _deleteAll(nPath);",
" if (files.get(databaseName) == null) {",
" // The service root has been deleted, which means that all",
" // the data has been deleted. Mark this store for removal.",
" deleteMe = true;",
" }",
" return deleted;"
],
"header": "@@ -198,7 +223,13 @@ public final class DataStore {",
"removed": [
" return _deleteAll(nPath);"
]
}
]
}
] |
derby-DERBY-4428-c406220e
|
DERBY-4428: Add proper delete mechanism for in-memory databases
This patch adds the final pieces to enable the drop attribute for in-memory
databases. Usage: 'jdbc:derby:memory:myDB;drop=true[;user=...;password=...]'
If successful, an exception with SQL state 08006 is raised.
* EmbedConnection
Added parsing of the drop attribute. 'true' yields true, everything else
results in false (i.e. non-boolean values are accepted and silently ignored).
Added code to drop a database.
Introduced XJ048, raised when drop is combined with shutdown.
Added code to check for attribute conflicts (create, restore).
Added utility method to put the current thread to sleep.
* DatabaseContextImpl
Added a comment.
* Monitor
Added code to disallow dropping databases not using the in-memory back end.
* Attribute
Added constant for the 'drop' JDBC connection URL attribute.
* SQLState, messages.xml, lang.ErrorCodeTest
Added two new messages: 08006 (database dropped) and XJ048 (conflicting
boot attributes)
* Other tests
Enabled drop functionality and tests using it.
Patch file: derby-4428-2b-generic_db_drop.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@884542 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/jdbc/EmbedConnection.java",
"hunks": [
{
"added": [
" final boolean dropDatabase = isDropDatabase(info);",
"",
" // Don't allow both the shutdown and the drop attribute.",
" if (shutdown && dropDatabase) {",
" throw newSQLException(",
" SQLState.CONFLICTING_BOOT_ATTRIBUTES,",
" Attribute.SHUTDOWN_ATTR + \", \" + Attribute.DROP_ATTR);",
" }"
],
"header": "@@ -270,6 +270,14 @@ public abstract class EmbedConnection implements EngineConnection",
"removed": []
},
{
"added": [
" dropDatabase ||"
],
"header": "@@ -277,6 +285,7 @@ public abstract class EmbedConnection implements EngineConnection",
"removed": []
},
{
"added": [
" if (createBoot && !shutdown && !dropDatabase)"
],
"header": "@@ -379,7 +388,7 @@ public abstract class EmbedConnection implements EngineConnection",
"removed": [
"\t\t\tif (createBoot && !shutdown)"
]
},
{
"added": [
" // Drop the database at this point, if that is requested.",
" if (dropDatabase) {",
" if (!usingNoneAuth &&",
" getLanguageConnection().usesSqlAuthorization()) {",
" // Only the database owner is allowed to drop the database.",
" // NOTE: Reusing the message for shutdown, as drop database",
" // includes a shutdown. May want to change this later",
" // if/when we add system privileges.",
" checkIsDBOwner(OP_SHUTDOWN);",
" }",
"",
" // TODO: If system privileges is turned on, we need to check",
" // that the user has the shutdown/drop privilege. Waiting for",
" // Derby-2109",
"",
" String dbName = tr.getDBName(); // Save before shutdown",
" // TODO: Should block database access and sleep for a while here",
" // Shut down the database.",
" handleException(tr.shutdownDatabaseException());",
" // Give running threads a chance to detect the shutdown.",
" // Removing the service, or rather its conglomerates, too early",
" // may cause a number of errors to be thrown. Try to make the",
" // shutdown/drop as clean as possible.",
" sleep(500L);",
" Monitor.removePersistentService(dbName);",
" // Generate the drop database exception here, as this is the",
" // only place it will be thrown.",
" StandardException se = StandardException.newException(",
" SQLState.DROP_DATABASE, dbName);",
" se.setReport(StandardException.REPORT_NEVER);",
" throw se;",
" }",
""
],
"header": "@@ -555,6 +564,39 @@ public abstract class EmbedConnection implements EngineConnection",
"removed": []
},
{
"added": [
" // Don't allow combinations of create/restore and drop.",
" if (createCount == 1 && isDropDatabase(p)) {",
" // See whether we have conflicting create or restore attributes.",
" String sqlState = SQLState.CONFLICTING_CREATE_ATTRIBUTES;",
" if (restoreCount > 0) {",
" sqlState = SQLState.CONFLICTING_RESTORE_ATTRIBUTES;",
" }",
" throw newSQLException(sqlState);",
" }",
""
],
"header": "@@ -664,6 +706,16 @@ public abstract class EmbedConnection implements EngineConnection",
"removed": []
},
{
"added": [
" /**",
" * Examines the boot properties and determines if the given attributes",
" * would entail dropping the database.",
" *",
" * @param p the attribute set",
" * @return {@code true} if the drop database operation is requested,",
" * {@code false} if not.",
" */",
" private boolean isDropDatabase(Properties p) {",
" return (Boolean.valueOf(",
" p.getProperty(Attribute.DROP_ATTR)).booleanValue());",
" }",
""
],
"header": "@@ -676,6 +728,19 @@ public abstract class EmbedConnection implements EngineConnection",
"removed": []
}
]
},
{
"file": "java/shared/org/apache/derby/shared/common/reference/SQLState.java",
"hunks": [
{
"added": [
" String CONFLICTING_BOOT_ATTRIBUTES = \"XJ048.C\";"
],
"header": "@@ -1613,6 +1613,7 @@ public interface SQLState {",
"removed": []
}
]
}
] |
derby-DERBY-4429-48229dfd
|
DERBY-4429 - Let StressMultiTest read Threads and Minutes to run as system property
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@832364 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4432-b1a01b25
|
DERBY-4432: Memory leak when attempting to boot non-existing database with the in-memory back end.
Fixed memory leak happening when trying to boot non-existing databases by using a dummy store to handle operations that don't require a proper store.
Added a regression test (must be run with a small Java heap).
Also changed the existing test by using a discarding log writer to avoid writing a huge log file to disk (a message is logged for each failed boot).
Patch file: derby-4432-1a-mem_leak_fix.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@834418 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/io/VFMemoryStorageFactory.java",
"hunks": [
{
"added": [
" /**",
" * Dummy store used to carry out frequent operations that don't",
" * require a \"proper store\", for instance getting the canonical name",
" * of the data store.",
" */",
" private static final DataStore DUMMY_STORE = new DataStore(\"::DUMMY::\");",
""
],
"header": "@@ -46,6 +46,13 @@ public class VFMemoryStorageFactory",
"removed": []
},
{
"added": [
" } else if (uniqueName != null) {",
" } else {",
" // We have a database name, but no unique name.",
" // Assume that the client only wants to do some",
" // \"book-keeping\" operations, like getting the",
" // canonical name.",
" this.dbData = DUMMY_STORE;"
],
"header": "@@ -114,10 +121,16 @@ public class VFMemoryStorageFactory",
"removed": [
" } else {"
]
}
]
}
] |
derby-DERBY-4433-a8db6658
|
DERBY-4442: Default value and identity in an INSERT result set evaluated too early
Always add a ProjectRestrictNode on top of the source result set for an
insert if the source columns are not ordered the same way as the target
columns, or if inserting into a subset of the columns, and the source is not
a table constructor. Then the adding of default values to the source result
column list happens on top of the original source result node, and not
inside that node. This means that the source result is produced before the
default values are added, and the generation of the defaults happens at the
correct time.
As a side-effect, these other issues are fixed:
DERBY-3 Identity column not filled consecutively with "insert ... select distinct"
DERBY-4433 Cannot insert from EXCEPT/INTERSECT when target table has more columns than the source
Much of the added code is actually old code revived from the Subversion
history. Before DERBY-1644, we used to put a ProjectRestrictNode on top of
the source result if it was a SetOperatorNode. The code that inserted the
ProjectRestrictNode was added back and changed to operate on ResultSetNode
instead of SetOperatorNode.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@885421 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/InsertNode.java",
"hunks": [
{
"added": [
"\t\tresultSet = enhanceAndCheckForAutoincrement(resultSet, inOrder, colMap);"
],
"header": "@@ -411,9 +411,7 @@ public final class InsertNode extends DMLModStatementNode",
"removed": [
"\t\tenhanceAndCheckForAutoincrement(resultSet, inOrder,",
"\t\t\t\tnumTableColumns, colMap, dataDictionary,",
" targetTableDescriptor, targetVTI );"
]
},
{
"added": [
"\t * @return a node representing the source for the insert",
"\tResultSetNode enhanceAndCheckForAutoincrement(",
"\t\t\tResultSetNode resultSet, boolean inOrder, int[] colMap)"
],
"header": "@@ -529,19 +527,13 @@ public final class InsertNode extends DMLModStatementNode",
"removed": [
"\t * @param numTableColumns # of columns in target RCL",
"\t * @param dataDictionary DataDictionary to use",
"\t * @param targetTableDescriptor Table Descriptor for target",
"\t * @param targetVTI Target description if it is a VTI",
"\tprivate void enhanceAndCheckForAutoincrement(ResultSetNode resultSet, ",
"\t\t\tboolean inOrder, int numTableColumns, int []colMap, ",
"\t\t\tDataDictionary dataDictionary,",
"\t\t\tTableDescriptor targetTableDescriptor,",
" FromVTI targetVTI)"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ResultSetNode.java",
"hunks": [
{
"added": [
"\t * we will add a ProjectRestrictNode on top of the source with an RCL that",
"\t * @param target the target node for the insert",
"\t * @param inOrder are source cols in same order as target cols?",
"\t * @return a node that replaces this node and whose RCL matches the target",
"\t * RCL. May return this node if no changes to the RCL are needed, or if the",
"\t * RCL is modified in-place.",
"\tResultSetNode enhanceRCLForInsert(",
"\t\t\tInsertNode target, boolean inOrder, int[] colMap)",
"\t\tthrows StandardException",
"\t{",
"\t\tif (!inOrder || resultColumns.size() < target.resultColumnList.size()) {",
"\t\t\treturn generateProjectRestrictForInsert(target, colMap);",
"\t\t}",
"\t\treturn this;",
"\t}",
"",
"\t/**",
"\t * Generate an RCL that can replace the original RCL of this node to",
"\t * match the RCL of the target for the insert.",
"\t *",
"\t * @param target the target node for the insert",
"\t * @param colMap int array representation of correspondence between",
"\t * RCLs - colmap[i] = -1 -> missing in current RCL",
"\t * colmap[i] = j -> targetRCL(i) <-> thisRCL(j+1)",
"\t * @return an RCL that matches the target RCL",
"\t */",
"\tResultColumnList getRCLForInsert(InsertNode target, int[] colMap)"
],
"header": "@@ -916,24 +916,42 @@ public abstract class ResultSetNode extends QueryTreeNode",
"removed": [
"\t * we will reorder and/or add defaults to the current RCL so that is",
"\t * @param numTargetColumns\t# of columns in target RCL",
"\t * @param dataDictionary\tDataDictionary to use",
"\t * @param targetTD\t\t\tTableDescriptor for target if the target is not a VTI, null if a VTI",
" * @param targetVTI Target description if it is a VTI, null if not a VTI",
"\tpublic void enhanceRCLForInsert(int numTargetColumns, int[] colMap, ",
"\t\t\t\t\t\t\t\t\t\t\t DataDictionary dataDictionary,",
"\t\t\t\t\t\t\t\t\t\t\t TableDescriptor targetTD,",
" FromVTI targetVTI)"
]
},
{
"added": [
"\t\tint numTargetColumns = target.resultColumnList.size();"
],
"header": "@@ -946,10 +964,10 @@ public abstract class ResultSetNode extends QueryTreeNode",
"removed": [
"\t\t\tColumnReference newColumnReference;"
]
},
{
"added": [
"\t\t\t\tnewResultColumn = genNewRCForInsert(",
"\t\t\t\t\t\ttarget.targetTableDescriptor,",
"\t\t\t\t\t\ttarget.targetVTI,",
"\t\t\t\t\t\tindex + 1,",
"\t\t\t\t\t\ttarget.getDataDictionary());",
"\t\treturn newResultCols;"
],
"header": "@@ -958,14 +976,17 @@ public abstract class ResultSetNode extends QueryTreeNode",
"removed": [
"\t\t\t\tnewResultColumn = genNewRCForInsert(targetTD, targetVTI, index + 1, dataDictionary);",
"\t\t/* Set the source RCL to the massaged version */",
"\t\tresultColumns = newResultCols;"
]
}
]
}
] |
derby-DERBY-4435-aa86a51f
|
DERBY-4435
collation[] object was not getting initialized and that was the cause of NPE. Fixed the problem by initializing the arrary correctly.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@834725 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java",
"hunks": [
{
"added": [
" collation = new int[numIndexes][];"
],
"header": "@@ -2162,6 +2162,7 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": []
}
]
}
] |
derby-DERBY-4437-311ee54c
|
DERBY-4437: Make the allocation of sequence/identity ranges pluggable.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1138434 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/catalog/SequenceGenerator.java",
"hunks": [
{
"added": [
"import org.apache.derby.catalog.SequencePreallocator;"
],
"header": "@@ -20,6 +20,7 @@",
"removed": []
},
{
"added": [],
"header": "@@ -111,14 +112,6 @@ public class SequenceGenerator",
"removed": [
" /**",
" * Default number of values to pre-allocate. In the future, we may want to provide",
" * something more sophisticated. For instance, we might want to make Derby tune",
" * this number per sequence generator or give the user the power to override Derby's",
" * decision.",
" */",
" public static final int DEFAULT_PREALLOCATION_COUNT = 5;",
""
]
},
{
"added": [
" // Name of the schema that the sequence lives in.",
" private final String _SCHEMA_NAME;",
"",
" // Name of the sequence.",
" // Logic to determine how many values to pre-allocate",
" private final SequencePreallocator _PREALLOCATOR;",
""
],
"header": "@@ -168,9 +161,15 @@ public class SequenceGenerator",
"removed": [
" // Name of the sequence (for error messages)."
]
},
{
"added": [],
"header": "@@ -183,12 +182,6 @@ public class SequenceGenerator",
"removed": [
" // This is the number of values to pre-allocate per chunk. Right now this",
" // is a constant which we figure out when we initialize the generator.",
" // However, this number could change over time if, for instance, Derby",
" // tunes it on the fly.",
" private long _valuesPerAllocation;",
" "
]
},
{
"added": [
" String schemaName,",
" String sequenceName,",
" SequencePreallocator sequencePreallocator"
],
"header": "@@ -209,7 +202,9 @@ public class SequenceGenerator",
"removed": [
" String sequenceName"
]
},
{
"added": [
" _SCHEMA_NAME = schemaName;",
" _PREALLOCATOR = sequencePreallocator;"
],
"header": "@@ -229,14 +224,14 @@ public class SequenceGenerator",
"removed": [
"",
" _valuesPerAllocation = computePreAllocationCount();"
]
},
{
"added": [
" /**",
" * <p>",
" * Get the name of the schema of this sequence generator. Technically, this doesn't need to be",
" * synchronized. But it is simpler to just maintain a rule that all public methods",
" * should be synchronized.",
" * </p>",
" */",
" public synchronized String getSchemaName() { return _SCHEMA_NAME; }",
" "
],
"header": "@@ -245,6 +240,15 @@ public class SequenceGenerator",
"removed": []
},
{
"added": [
" ( SQLState.LANG_SEQUENCE_GENERATOR_EXHAUSTED, _SCHEMA_NAME, _SEQUENCE_NAME );"
],
"header": "@@ -299,7 +303,7 @@ public class SequenceGenerator",
"removed": [
" ( SQLState.LANG_SEQUENCE_GENERATOR_EXHAUSTED, _SEQUENCE_NAME );"
]
},
{
"added": [
" int preferredValuesPerAllocation = computePreAllocationCount();",
" "
],
"header": "@@ -403,6 +407,8 @@ public class SequenceGenerator",
"removed": []
},
{
"added": [
" if ( remainingLegalValues >= preferredValuesPerAllocation )",
" newValueOnDisk = oldCurrentValue + ( preferredValuesPerAllocation * _INCREMENT );",
" valuesToAllocate = preferredValuesPerAllocation;"
],
"header": "@@ -414,10 +420,10 @@ public class SequenceGenerator",
"removed": [
" if ( remainingLegalValues >= _valuesPerAllocation )",
" newValueOnDisk = oldCurrentValue + ( _valuesPerAllocation * _INCREMENT );",
" valuesToAllocate = _valuesPerAllocation;"
]
},
{
"added": [
" long spillOverValues = preferredValuesPerAllocation - remainingLegalValues;",
" valuesToAllocate = preferredValuesPerAllocation;"
],
"header": "@@ -425,13 +431,13 @@ public class SequenceGenerator",
"removed": [
" long spillOverValues = _valuesPerAllocation - remainingLegalValues;",
" valuesToAllocate = _valuesPerAllocation;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/catalog/SequenceUpdater.java",
"hunks": [
{
"added": [
"import org.apache.derby.catalog.SequencePreallocator;",
"import org.apache.derby.iapi.reference.Property;",
"import org.apache.derby.iapi.services.property.PropertyUtil;"
],
"header": "@@ -20,13 +20,16 @@",
"removed": []
},
{
"added": [
" /** Make a new range allocator (called when the generator is instantiated) */",
" protected SequencePreallocator makePreallocator( TransactionController tc )",
" throws StandardException",
" {",
" String propertyName = Property.LANG_SEQUENCE_PREALLOCATOR;",
" String className = PropertyUtil.getServiceProperty( tc, propertyName );",
"",
" if ( className == null ) { return new SequenceRange(); }",
"",
" try {",
" return (SequencePreallocator) Class.forName( className ).newInstance();",
" }",
" catch (ClassNotFoundException e) { throw missingAllocator( propertyName, className, e ); }",
" catch (ClassCastException e) { throw missingAllocator( propertyName, className, e ); }",
" catch (InstantiationException e) { throw missingAllocator( propertyName, className, e ); }",
" catch (IllegalAccessException e) { throw missingAllocator( propertyName, className, e ); }",
" }",
" private StandardException missingAllocator( String propertyName, String className, Exception e )",
" {",
" return StandardException.newException( SQLState.LANG_UNKNOWN_SEQUENCE_PREALLOCATOR, e, propertyName, className );",
" }",
" "
],
"header": "@@ -434,6 +437,28 @@ public abstract class SequenceUpdater implements Cacheable",
"removed": []
},
{
"added": [
" isd.getSchemaDescriptor().getSchemaName(),",
" isd.getSequenceName(),",
" makePreallocator( readOnlyTC )"
],
"header": "@@ -495,7 +520,9 @@ public abstract class SequenceUpdater implements Cacheable",
"removed": [
" isd.getSequenceName()"
]
}
]
},
{
"file": "java/shared/org/apache/derby/shared/common/reference/SQLState.java",
"hunks": [
{
"added": [
"\tString LANG_INVALID_ROW_COUNT_OFFSET = \"2201X\";"
],
"header": "@@ -719,8 +719,8 @@ public interface SQLState {",
"removed": [
"\tString LANG_INVALID_ROW_COUNT_OFFSET = \"2201X\";"
]
}
]
}
] |
derby-DERBY-4437-35067bf6
|
DERBY-4437: Add more upgrade tests for the changes to the behavior of sequences/identities.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1142013 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4437-365a7500
|
DERBY-4437: Change default length of preallocated ranges for sequences and identities and make this default easy to configure.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1141567 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/catalog/SequenceRange.java",
"hunks": [
{
"added": [
" * Default number of values to pre-allocate. This is the size of the preallocation range",
" * used by other databases. See DERBY-4437.",
" private static final int DEFAULT_PREALLOCATION_COUNT = 20;"
],
"header": "@@ -38,12 +38,10 @@ public class SequenceRange implements SequencePreallocator",
"removed": [
" * Default number of values to pre-allocate. In the future, we may want to provide",
" * something more sophisticated. For instance, we might want to make Derby tune",
" * this number per sequence generator or give the user the power to override Derby's",
" * decision.",
" private static final int DEFAULT_PREALLOCATION_COUNT = 5;"
]
},
{
"added": [
" private int _rangeSize;",
""
],
"header": "@@ -51,6 +49,8 @@ public class SequenceRange implements SequencePreallocator",
"removed": []
},
{
"added": [
" public SequenceRange()",
" {",
" this( DEFAULT_PREALLOCATION_COUNT );",
" }",
"",
" public SequenceRange( int rangeSize )",
" {",
" if ( rangeSize <= 0 ) { rangeSize = DEFAULT_PREALLOCATION_COUNT; }",
" ",
" _rangeSize = rangeSize;",
" }"
],
"header": "@@ -58,7 +58,17 @@ public class SequenceRange implements SequencePreallocator",
"removed": [
" public SequenceRange() {}"
]
}
]
}
] |
derby-DERBY-4437-729be573
|
DERBY-4437: Replace old identity generator with a new, pre-allocating sequence generator.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1135226 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/DataDictionary.java",
"hunks": [
{
"added": [],
"header": "@@ -1717,29 +1717,6 @@ public interface DataDictionary",
"removed": [
"\t/**",
"\t * getSetAutoincrementValue fetches the autoincrement value from ",
"\t * SYSCOLUMNS given a row location. If doUpdate is true it updates",
"\t * the autoincrement column with the new value.",
"\t * the value returned by this routine is the new value and *NOT* the",
"\t * value in the system catalogs.",
"\t * ",
"\t * @param rl\t\tRowLocation of the entry in SYSCOLUMNS.",
"\t * @param tc\t\tTransactionController to use.",
"\t * @param doUpdate Write the new value to disk if TRUE.",
"\t * @param newValue\tA NumberDataValue to use to return incremented value. If",
"\t * null, then the caller simply wants the current value fromd disk.",
"\t * @param wait\t\tIf true, then the caller wants to wait for locks. When",
"\t * using a nested user xaction we want to timeout right away if the parent",
"\t * holds the lock.",
"\t */",
"\tpublic NumberDataValue \tgetSetAutoincrementValue(RowLocation rl,",
"\t\t\t\t\t\t\t\t\t\t\t TransactionController tc,",
"\t\t\t\t\t\t\t\t\t\t\t boolean doUpdate,",
"\t\t\t\t\t\t\t\t\t\t\t NumberDataValue newValue,",
"\t\t\t\t\t\t\t\t\t\t\t boolean wait)",
"\t\tthrows StandardException;",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/SequenceDescriptor.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.types.TypeId;"
],
"header": "@@ -32,6 +32,7 @@ import org.apache.derby.iapi.sql.depend.DependencyManager;",
"removed": []
},
{
"added": [
" // indexes into array of computed min/max values",
" public static final int MIN_VALUE = 0;",
" public static final int MAX_VALUE = MIN_VALUE + 1;"
],
"header": "@@ -45,6 +46,9 @@ import org.apache.derby.impl.sql.catalog.DDdependableFinder;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/catalog/DataDictionaryImpl.java",
"hunks": [
{
"added": [
" private CacheManager idGeneratorCache;",
" private int idgenCacheSize;"
],
"header": "@@ -385,11 +385,13 @@ public final class\tDataDictionaryImpl",
"removed": []
},
{
"added": [
"\t\tvalue = startParams.getProperty(Property.LANG_IDGEN_CACHE_SIZE);",
"\t\tidgenCacheSize = PropertyUtil.intPropertyValue(Property.LANG_IDGEN_CACHE_SIZE, value,",
"\t\t\t\t\t\t\t\t\t 0, Integer.MAX_VALUE, Property.LANG_IDGEN_CACHE_SIZE_DEFAULT);",
""
],
"header": "@@ -633,6 +635,10 @@ public final class\tDataDictionaryImpl",
"removed": []
},
{
"added": [
"\t\tidGeneratorCache = cf.newCacheManager",
" ( this, \"IdentityGeneratorCache\", idgenCacheSize, idgenCacheSize );",
""
],
"header": "@@ -688,6 +694,9 @@ public final class\tDataDictionaryImpl",
"removed": []
},
{
"added": [
" else if ( cm == idGeneratorCache ) { return new SequenceUpdater.SyscolumnsUpdater( this ); }"
],
"header": "@@ -979,6 +988,7 @@ public final class\tDataDictionaryImpl",
"removed": []
},
{
"added": [
"\tprivate RowLocation computeIdentityRowLocation(TransactionController tc,",
" \t\t\t\t\t\t\t\t\t\t\t\t\tTableDescriptor td)",
" \t\t\t\tthrows StandardException",
" \t{",
" \t\tint size;",
"\t\tif (!(td.tableHasAutoincrement())) { return null; }",
" ",
" \t\tsize = td.getNumberOfColumns();",
" ",
" \t\tfor (int i = 0; i < size; i++)",
" \t\t{",
" \t\t\tColumnDescriptor cd = td.getColumnDescriptor(i + 1);",
" \t\t\tif (cd.isAutoincrement())",
"\t\t\t\treturn computeRowLocation(tc, td, cd.getColumnName());",
" \t\t}",
" ",
"\t\treturn null;",
" \t}"
],
"header": "@@ -8633,91 +8643,26 @@ public final class\tDataDictionaryImpl",
"removed": [
"",
"\t/**",
"\t * @see DataDictionary#getSetAutoincrementValue",
"\t */",
"\tpublic NumberDataValue getSetAutoincrementValue(",
"\t\t\t\t\t\t\t\t\t\t\tRowLocation rl, ",
"\t\t\t\t\t\t\t\t\t\t\tTransactionController tc,",
"\t\t\t\t\t\t\t\t\t\t\tboolean doUpdate,",
"\t\t\t\t\t\t\t\t\t\t\tNumberDataValue newValue,",
"\t\t\t\t\t\t\t\t\t\t\tboolean wait)",
"\t throws StandardException",
"\t{",
"",
"\t\tFormatableBitSet columnToUpdate = new ",
" \t\t\tFormatableBitSet(SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMN_COUNT);",
" \t\tint columnNum = SYSCOLUMNSRowFactory.SYSCOLUMNS_AUTOINCREMENTVALUE;",
"\t\tTabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];",
" \t\tConglomerateController heapCC = null;",
"\t\tSYSCOLUMNSRowFactory\trf = (SYSCOLUMNSRowFactory) ti.getCatalogRowFactory();",
"\t\tExecRow row = rf.makeEmptyRow();",
"",
" \t\tFormatableBitSet columnToRead = new",
" \t\t\tFormatableBitSet(SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMN_COUNT);",
"\t\t",
"\t\t// FormatableBitSet is 0 based.",
" \t\tcolumnToRead.set(columnNum - 1); // current value.",
"\t\tcolumnToRead.set(columnNum); // start value.",
"\t\tcolumnToRead.set(columnNum + 1); // increment value.",
"",
" try",
" {",
"\t\t\t/* if wait is true then we need to do a wait while trying to",
"\t\t\t open/fetch from the conglomerate. note we use wait both to",
"\t\t\t open as well as fetch from the conglomerate.",
"\t\t\t*/",
" heapCC = ",
" tc.openConglomerate(",
" ti.getHeapConglomerate(), ",
" false,",
" (TransactionController.OPENMODE_FORUPDATE |",
" ((wait) ? 0 : TransactionController.OPENMODE_LOCK_NOWAIT)),",
" TransactionController.MODE_RECORD,",
" TransactionController.ISOLATION_REPEATABLE_READ);",
"",
" boolean baseRowExists = ",
" heapCC.fetch(rl, row.getRowArray(), columnToRead, wait);",
"",
" columnToUpdate.set(columnNum - 1); // current value.",
"",
" // while the Row interface is 1 based.",
" NumberDataValue currentAI = (NumberDataValue)row.getColumn(columnNum);",
" long currentAIValue = currentAI.getLong();",
" ",
" if (doUpdate)",
" {",
" // we increment and store the new value in SYSCOLUMNS",
" NumberDataValue increment = (NumberDataValue)row.getColumn(columnNum + 2);",
" currentAI = currentAI.plus(currentAI, increment, currentAI);",
" row.setColumn(columnNum, currentAI);",
" heapCC.replace(rl, row.getRowArray(), columnToUpdate);",
" }",
" ",
" // but we return the \"currentAIValue\"-- i.e the value before",
" // incrementing it. ",
" if (newValue != null)",
" {",
" // user has passed in an object; set the current value in there and",
" // return it.",
" newValue.setValue(currentAIValue);",
" return newValue;",
" }",
" ",
" else",
" // reuse the object read from row.",
" currentAI.setValue(currentAIValue);",
" return currentAI;",
" }",
" finally",
" {",
" if (heapCC != null)",
" heapCC.close();",
" }",
"\t}"
]
},
{
"added": [
"",
"",
"",
"\t\tidGeneratorCache.cleanAll();",
"\t\tidGeneratorCache.ageOut();",
""
],
"header": "@@ -8884,10 +8829,16 @@ public final class\tDataDictionaryImpl",
"removed": []
},
{
"added": [
"\t * autoincrement column. this throws away the sequence generator for the",
" * value so that it must be created from scratch."
],
"header": "@@ -10013,7 +9964,8 @@ public final class\tDataDictionaryImpl",
"removed": [
"\t * autoincrement column."
]
},
{
"added": [
"",
" // remove the generator for this identity column so that it will be reinitialized with the new value.",
" flushIdentityFromCache( tableUUID );",
" ",
" /**",
" * Remove an id generator from the cache so that it will have to be recreated.",
" * This method is called after changing the generator on disk.",
" */",
" private void flushIdentityFromCache( UUID tableID ) throws StandardException",
" {",
" Cacheable idGenerator = idGeneratorCache.findCached( tableID.toString() );",
" if ( idGenerator != null ) { idGeneratorCache.remove( idGenerator ); }",
" }",
""
],
"header": "@@ -10063,9 +10015,23 @@ public final class\tDataDictionaryImpl",
"removed": []
},
{
"added": [
"\t/**",
"\t * Computes the RowLocation in SYSCOLUMNS for the identity column of a table. Also",
" * constructs a sequence descriptor describing the current state of the identity sequence.",
"\t * ",
"\t * @param tc\t\t\tTransaction Controller to use.",
"\t * @param tableIDstring UUID of the table as a string",
"\t * @param rowLocation OUTPUT param for returing the row location",
"\t * @param sequenceDescriptor OUTPUT param for return the sequence descriptor",
" *",
"\t * @exception StandardException thrown on failure.",
"\t */ ",
"\tvoid computeIdentityRowLocation",
" ( TransactionController tc, String tableIDstring, RowLocation[] rowLocation, SequenceDescriptor[] sequenceDescriptor )",
"\t\tthrows StandardException\t\t\t\t\t\t\t\t ",
"\t{",
" UUID tableID = getUUIDFactory().recreateUUID( tableIDstring );",
" TableDescriptor td = getTableDescriptor( tableID );",
"",
" // there should only be 1 identity column per table",
" rowLocation[ 0 ] = computeIdentityRowLocation( tc, td );",
"",
"\t\tTabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];",
" \t\tConglomerateController heapCC = null;",
"\t\tSYSCOLUMNSRowFactory\trf = (SYSCOLUMNSRowFactory) ti.getCatalogRowFactory();",
"\t\tExecRow row = rf.makeEmptyRow();",
"\t\tFormatableBitSet columnsToFetch = new FormatableBitSet( SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMN_COUNT );",
"",
" for ( int i = 0; i < SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMN_COUNT; i++ )",
" {",
" columnsToFetch.set( i );",
" }",
"",
" try",
" {",
" heapCC = ",
" tc.openConglomerate(",
" ti.getHeapConglomerate(), ",
" false,",
" 0,",
" TransactionController.MODE_RECORD,",
" TransactionController.ISOLATION_REPEATABLE_READ);",
"",
" heapCC.fetch( rowLocation[ 0 ], row.getRowArray(), columnsToFetch, true );",
"",
" ColumnDescriptor cd = (ColumnDescriptor) rf.buildDescriptor( row, td, this );",
" DataTypeDescriptor dtd = cd.getType();",
" Long[] minMax = SequenceDescriptor.computeMinMax( dtd, null, null );",
"",
" sequenceDescriptor[ 0 ] = getDataDescriptorGenerator().newSequenceDescriptor",
" (",
" td.getSchemaDescriptor(),",
" td.getUUID(),",
" td.getName(),",
" dtd,",
" new Long( cd.getAutoincValue() ),",
" cd.getAutoincStart(),",
" minMax[ SequenceDescriptor.MIN_VALUE ].longValue(),",
" minMax[ SequenceDescriptor.MAX_VALUE ].longValue(),",
" cd.getAutoincInc(),",
" false",
" );",
" }",
" finally",
" {",
" if (heapCC != null) { heapCC.close(); }",
" }",
"\t}",
"",
"\t/**",
"\t * Set the current value of an identity sequence. This method does not perform",
" * any sanity checking but assumes that the caller knows what they are doing. If the",
" * old value on disk is not what we expect it to be, then we are in a race with another",
" * session. They won and we don't update the value on disk. However, if the old value",
" * is null, that is a signal to us that we should update the value on disk anyway.",
"\t * ",
"\t * @param tc\t\t\tTransaction Controller to use.",
"\t * @param rowLocation Row in SYSCOLUMNS to update.",
" * @param wait True if we should wait for locks",
" * @param oldValue What we expect to find in the AUTOINCREMENTVALUE column.",
" * @param newValue What to stuff into the AUTOINCREMENTVALUE column.",
"\t * ",
"\t * @return Returns true if the value was successfully updated, false if we lost a race with another session.",
" *",
"\t * @exception StandardException thrown on failure.",
"\t */",
" boolean updateCurrentIdentityValue",
" ( TransactionController tc, RowLocation rowLocation, boolean wait, Long oldValue, Long newValue )",
" throws StandardException",
" {",
" return updateCurrentSeqValue",
" (",
" tc,",
" rowLocation,",
" wait,",
" oldValue,",
" newValue,",
" coreInfo[SYSCOLUMNS_CORE_NUM],",
" SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMN_COUNT,",
" SYSCOLUMNSRowFactory.SYSCOLUMNS_AUTOINCREMENTVALUE",
" );",
" }",
" "
],
"header": "@@ -10093,6 +10059,108 @@ public final class\tDataDictionaryImpl",
"removed": []
},
{
"added": [
" return updateCurrentSeqValue",
" (",
" tc,",
" rowLocation,",
" wait,",
" oldValue,",
" newValue,",
" getNonCoreTI( SYSSEQUENCES_CATALOG_NUM ),",
" SYSSEQUENCESRowFactory.SYSSEQUENCES_COLUMN_COUNT,",
" SYSSEQUENCESRowFactory.SYSSEQUENCES_CURRENT_VALUE",
" );",
" }",
" ",
"\t/**",
"\t * Set the current value of an ANSI/ISO sequence or identity column. This method does not perform",
" * any sanity checking but assumes that the caller knows what they are doing. If the",
" * old value on disk is not what we expect it to be, then we are in a race with another",
" * session. They won and we don't update the value on disk. However, if the old value",
" * is null, that is a signal to us that we should update the value on disk anyway.",
"\t * ",
"\t * @param tc\t\t\tTransaction Controller to use.",
"\t * @param rowLocation Row in SYSSEQUENCES or SYSCOLUMNS to update.",
" * @param wait True if we should wait for locks",
" * @param oldValue What we expect to find in the currentvalue column.",
" * @param newValue What to stuff into the current value column.",
" * @param ti Table info for the catalog that is being updated.",
" * @param columnsInRow Number of columns in the catalog row.",
" * @param columnNum ID of the current value column",
"\t * ",
"\t * @return Returns true if the value was successfully updated, false if we lost a race with another session.",
" *",
"\t * @exception StandardException thrown on failure.",
"\t */",
" private boolean updateCurrentSeqValue",
" (",
" TransactionController tc,",
" RowLocation rowLocation,",
" boolean wait,",
" Long oldValue,",
" Long newValue,",
" TabInfoImpl ti,",
" int columnsInRow,",
" int columnNum",
" )",
" throws StandardException",
" {",
"\t\tFormatableBitSet columnToUpdate = new FormatableBitSet( columnsInRow );",
"\t\tCatalogRowFactory\trf = ti.getCatalogRowFactory();"
],
"header": "@@ -10151,11 +10219,55 @@ public final class\tDataDictionaryImpl",
"removed": [
" \t\tint columnNum = SYSSEQUENCESRowFactory.SYSSEQUENCES_CURRENT_VALUE;",
"\t\tFormatableBitSet columnToUpdate = new FormatableBitSet( SYSSEQUENCESRowFactory.SYSSEQUENCES_COLUMN_COUNT );",
"\t\tTabInfoImpl ti = getNonCoreTI( SYSSEQUENCES_CATALOG_NUM );",
"\t\tSYSSEQUENCESRowFactory\trf = (SYSSEQUENCESRowFactory) ti.getCatalogRowFactory();"
]
},
{
"added": [
" ( int catalogNumber, String uuidString, NumberDataValue returnValue )",
" CacheManager cm = null;",
" switch( catalogNumber )",
" {",
" case SYSSEQUENCES_CATALOG_NUM:",
" cm = sequenceGeneratorCache;",
" break;",
"",
" case SYSCOLUMNS_CATALOG_NUM:",
" cm = idGeneratorCache;",
" break;",
"",
" default:",
" throw StandardException.newException( SQLState.BTREE_UNIMPLEMENTED_FEATURE );",
" }",
"",
" sequenceUpdater = (SequenceUpdater) cm.find( uuidString );"
],
"header": "@@ -10211,13 +10323,28 @@ public final class\tDataDictionaryImpl",
"removed": [
" ( String sequenceUUIDstring, NumberDataValue returnValue )",
" sequenceUpdater = (SequenceUpdater) sequenceGeneratorCache.find( sequenceUUIDstring );"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/CreateSequenceNode.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.dictionary.SequenceDescriptor;"
],
"header": "@@ -26,6 +26,7 @@ import org.apache.derby.iapi.services.sanity.SanityManager;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/NextSequenceNode.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.dictionary.DataDictionary;"
],
"header": "@@ -28,6 +28,7 @@ import org.apache.derby.iapi.services.compiler.MethodBuilder;",
"removed": []
},
{
"added": [
"\t\tmb.push( DataDictionary.SYSSEQUENCES_CATALOG_NUM );"
],
"header": "@@ -133,6 +134,7 @@ public class NextSequenceNode extends ValueNode {",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java",
"hunks": [
{
"added": [
"\t\tif ( hasAutoincrement() )",
" int cacheLength = resultDescription.getColumnCount();",
"\t\t\taiCache = new NumberDataValue[ cacheLength ];",
"\t\t\tfor (int i = 0; i < cacheLength; i++)",
" if ( rcd.getType().getTypeId().isNumericTypeId() )",
" {",
" aiCache[i] = (NumberDataValue)rcd.getType().getNull();",
" }"
],
"header": "@@ -350,19 +350,18 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": [
" RowLocation[] rla;",
"",
"\t\tif ((rla = constants.getAutoincRowLocation()) != null)",
"\t\t\taiCache = ",
"\t\t\t\tnew NumberDataValue[rla.length];",
"\t\t\tfor (int i = 0; i < resultDescription.getColumnCount(); i++)",
"\t\t\t\tif (rla[i] == null)",
"\t\t\t\t\tcontinue;",
"\t\t\t\taiCache[i] = (NumberDataValue)rcd.getType().getNull();"
]
},
{
"added": [
"",
" /**",
" * Return true if the table has an autoincrement column.",
" */",
" private boolean hasAutoincrement() throws StandardException",
" {",
" // Global temporary tables don't have table descriptors but they",
" // don't have identity columns either",
" TableDescriptor tabdesc = getTableDescriptor();",
" if ( tabdesc == null ) { return false; }",
" else { return tabdesc.tableHasAutoincrement(); }",
" }"
],
"header": "@@ -392,6 +391,18 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": []
},
{
"added": [
" /**",
" * Get the table descriptor if it hasn't already been looked up.",
" */",
" private TableDescriptor getTableDescriptor()",
" throws StandardException",
" {",
" if ( td == null ) { td = getDataDictionary().getTableDescriptor(constants.targetUUID); }",
"",
" return td;",
" }",
" ",
" /**",
" * Get the data dictionary if it hasn't already been looked up.",
" */",
" private DataDictionary getDataDictionary()",
" throws StandardException",
" {",
" if ( dd == null ) { dd = lcc.getDataDictionary(); }",
"",
" return dd;",
" }",
" "
],
"header": "@@ -584,6 +595,28 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": []
},
{
"added": [
" dvd = (NumberDataValue) aiCache[ index ].getNewNull();",
" dd.getCurrentValueAndAdvance",
" ( DataDictionary.SYSCOLUMNS_CATALOG_NUM, getTableDescriptor().getUUID().toString(), dvd );"
],
"header": "@@ -768,9 +801,9 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": [
"\t\t\t\t\tdvd = dd.getSetAutoincrementValue(",
"\t\t\t\t\t\t constants.autoincRowLocation[index],",
"\t\t\t\t\t\t\ttc, false, aiCache[index], true);"
]
},
{
"added": [
"\t\t\tNumberDataValue newValue = aiCache[ index ];",
" dd.getCurrentValueAndAdvance",
" ( DataDictionary.SYSCOLUMNS_CATALOG_NUM, getTableDescriptor().getUUID().toString(), newValue );",
"\t\t\tif (setIdentity) { identityVal = newValue.getLong(); }"
],
"header": "@@ -789,77 +822,12 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": [
"\t\t\tNumberDataValue newValue;",
"\t\t\tTransactionController nestedTC = null, tcToUse = tc;",
"",
"\t\t\ttry",
"\t\t\t{",
"\t\t\t\tnestedTC = tc.startNestedUserTransaction(false);",
"\t\t\t\ttcToUse = nestedTC;",
"\t\t\t}",
"\t\t\tcatch (StandardException se)",
"\t\t\t{",
"\t\t\t\t// If I cannot start a Nested User Transaction use the parent",
"\t\t\t\t// transaction to do all the work.",
"\t\t\t\ttcToUse = tc;",
"\t\t\t}",
"\t\t\ttry ",
"\t\t\t{",
"\t\t\t\t/* If tcToUse == tc, then we are using parent xaction-- this",
"\t\t\t\t can happen if for some reason we couldn't start a nested",
"\t\t\t\t transaction",
"\t\t\t\t*/",
"\t\t\t\tnewValue = dd.getSetAutoincrementValue(",
"\t\t\t\t\t\t constants.autoincRowLocation[index],",
"\t\t\t\t\t\t tcToUse, true, aiCache[index], (tcToUse == tc));",
"\t\t\t}",
"",
"\t\t\tcatch (StandardException se)",
"\t\t\t{",
"\t\t\t\tif (tcToUse == tc)",
"\t\t\t\t{",
"\t\t\t\t\t/* we've using the parent xaction and we've timed out; just",
"\t\t\t\t\t throw an error and exit.",
"\t\t\t\t\t*/",
"\t\t\t\t\tthrow se;",
"\t\t\t\t}",
"",
"\t\t\t\tif (se.getMessageId().equals(SQLState.LOCK_TIMEOUT))",
"\t\t\t\t{",
"\t\t\t\t\t// if we couldn't do this with a nested xaction, retry with",
"\t\t\t\t\t// parent-- we need to wait this time!",
"\t\t\t\t\tnewValue = dd.getSetAutoincrementValue(",
"\t\t\t\t\t\t\t\t\tconstants.autoincRowLocation[index],",
"\t\t\t\t\t\t\t\t\ttc, true, aiCache[index], true);",
"\t\t\t\t}",
"\t\t\t\telse if (se.getMessageId().equals(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE))",
"\t\t\t\t{",
"\t\t\t\t\t// if we got an overflow error, throw a more meaningful",
"\t\t\t\t\t// error message",
"\t\t\t\t\tthrow StandardException.newException(",
"\t\t\t\t\t\t\t\t\t\t\t\t SQLState.LANG_AI_OVERFLOW,",
"\t\t\t\t\t\t\t\t\t\t\t\t se,",
"\t\t\t\t\t\t\t\t\t\t\t\t constants.getTableName(),",
"\t\t\t\t\t\t\t\t\t\t\t\t constants.getColumnName(index));",
"\t\t\t\t}",
"\t\t\t\telse throw se;",
"\t\t\t}",
"\t\t\tfinally ",
"\t\t\t{",
"\t\t\t\t// no matter what, commit the nested transaction; if something",
"\t\t\t\t// bad happened in the child xaction lets not abort the parent",
"\t\t\t\t// here.",
"\t\t\t\tif (nestedTC != null)",
"\t\t\t\t{",
"\t\t\t\t\tnestedTC.commit();",
"\t\t\t\t\tnestedTC.destroy();",
"\t\t\t\t}",
"\t\t\t}",
"\t\t\taiCache[index] = newValue;",
"\t\t\tif (setIdentity)",
"\t\t\t\tidentityVal = newValue.getLong();"
]
}
]
},
{
"file": "java/storeless/org/apache/derby/impl/storeless/EmptyDictionary.java",
"hunks": [
{
"added": [
" ( int catalogNumber, String uuidString, NumberDataValue returnValue )"
],
"header": "@@ -702,7 +702,7 @@ public class EmptyDictionary implements DataDictionary, ModuleSupportable {",
"removed": [
" ( String sequenceUUIDstring, NumberDataValue returnValue )"
]
}
]
}
] |
derby-DERBY-4437-fe9e3d41
|
DERBY-4437: Add tests for the generator-based identity columns: ALTER TABLE, bulk import, and deferred INSERT.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1135754 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4441-54a04d34
|
DERBY-4441 Change sysinfo to print out more specific JVM information
Contributed by lilywei at yahoo dot com
Show system properties java.runtime.version and java.fullversion with sysinfo to give more specific JVM information.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@935700 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/tools/org/apache/derby/impl/tools/sysinfo/Main.java",
"hunks": [
{
"added": [
"\tprintPropertyIfNotNull(localAW, \"java.runtime.version\");",
"\tprintPropertyIfNotNull(localAW, \"java.fullversion\");",
" * Print property only if not null",
" * ",
" * @param localAW This is PrintWriter to print to",
" * @param property This is system property string",
" private static void printPropertyIfNotNull(java.io.PrintWriter localAW, String property) {",
" String propertyValue = getJavaProperty(property, true);\t",
" if (propertyValue != null) {",
" localAW.println(property + \": \" + propertyValue);",
" }",
"}",
" ",
" /**",
" * Return Java properties from java.lang.System. Will catch",
" * SecurityExceptions and note them for displaying information.",
" * @param whichProperty This is the name of the property",
" * ",
" * @return getJavaProperty(whichProperty, false) ",
" */",
"\t return getJavaProperty(whichProperty, false); ",
" }",
" ",
" /**",
" * Return Java properties from java.lang.System. Will catch",
" * SecurityExceptions and note them for displaying information.",
" * @param whichProperty This is the name of the property",
" * @param nullUnavailable return nothing if no such java property and nullUnavailable is true",
" * @return the Java property value or a string capturing a",
" * security exception.",
" */",
"",
" private static String getJavaProperty (final String whichProperty, boolean nullUnavailable) {",
" final String unavailable = nullUnavailable ? null : Main.getTextMessage (\"SIF01.H\");"
],
"header": "@@ -303,23 +303,49 @@ public static void getMainInfo (java.io.PrintWriter aw, boolean pause) {",
"removed": [
"",
"",
" Return Java properties from java.lang.System. Will catch",
" SecurityExceptions and note them for displaying information.",
"",
" @return the Java property value or a string capturing a",
" security exception.",
" final String unavailable = Main.getTextMessage (\"SIF01.H\");"
]
}
]
}
] |
derby-DERBY-4442-a8db6658
|
DERBY-4442: Default value and identity in an INSERT result set evaluated too early
Always add a ProjectRestrictNode on top of the source result set for an
insert if the source columns are not ordered the same way as the target
columns, or if inserting into a subset of the columns, and the source is not
a table constructor. Then the adding of default values to the source result
column list happens on top of the original source result node, and not
inside that node. This means that the source result is produced before the
default values are added, and the generation of the defaults happens at the
correct time.
As a side-effect, these other issues are fixed:
DERBY-3 Identity column not filled consecutively with "insert ... select distinct"
DERBY-4433 Cannot insert from EXCEPT/INTERSECT when target table has more columns than the source
Much of the added code is actually old code revived from the Subversion
history. Before DERBY-1644, we used to put a ProjectRestrictNode on top of
the source result if it was a SetOperatorNode. The code that inserted the
ProjectRestrictNode was added back and changed to operate on ResultSetNode
instead of SetOperatorNode.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@885421 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/InsertNode.java",
"hunks": [
{
"added": [
"\t\tresultSet = enhanceAndCheckForAutoincrement(resultSet, inOrder, colMap);"
],
"header": "@@ -411,9 +411,7 @@ public final class InsertNode extends DMLModStatementNode",
"removed": [
"\t\tenhanceAndCheckForAutoincrement(resultSet, inOrder,",
"\t\t\t\tnumTableColumns, colMap, dataDictionary,",
" targetTableDescriptor, targetVTI );"
]
},
{
"added": [
"\t * @return a node representing the source for the insert",
"\tResultSetNode enhanceAndCheckForAutoincrement(",
"\t\t\tResultSetNode resultSet, boolean inOrder, int[] colMap)"
],
"header": "@@ -529,19 +527,13 @@ public final class InsertNode extends DMLModStatementNode",
"removed": [
"\t * @param numTableColumns # of columns in target RCL",
"\t * @param dataDictionary DataDictionary to use",
"\t * @param targetTableDescriptor Table Descriptor for target",
"\t * @param targetVTI Target description if it is a VTI",
"\tprivate void enhanceAndCheckForAutoincrement(ResultSetNode resultSet, ",
"\t\t\tboolean inOrder, int numTableColumns, int []colMap, ",
"\t\t\tDataDictionary dataDictionary,",
"\t\t\tTableDescriptor targetTableDescriptor,",
" FromVTI targetVTI)"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ResultSetNode.java",
"hunks": [
{
"added": [
"\t * we will add a ProjectRestrictNode on top of the source with an RCL that",
"\t * @param target the target node for the insert",
"\t * @param inOrder are source cols in same order as target cols?",
"\t * @return a node that replaces this node and whose RCL matches the target",
"\t * RCL. May return this node if no changes to the RCL are needed, or if the",
"\t * RCL is modified in-place.",
"\tResultSetNode enhanceRCLForInsert(",
"\t\t\tInsertNode target, boolean inOrder, int[] colMap)",
"\t\tthrows StandardException",
"\t{",
"\t\tif (!inOrder || resultColumns.size() < target.resultColumnList.size()) {",
"\t\t\treturn generateProjectRestrictForInsert(target, colMap);",
"\t\t}",
"\t\treturn this;",
"\t}",
"",
"\t/**",
"\t * Generate an RCL that can replace the original RCL of this node to",
"\t * match the RCL of the target for the insert.",
"\t *",
"\t * @param target the target node for the insert",
"\t * @param colMap int array representation of correspondence between",
"\t * RCLs - colmap[i] = -1 -> missing in current RCL",
"\t * colmap[i] = j -> targetRCL(i) <-> thisRCL(j+1)",
"\t * @return an RCL that matches the target RCL",
"\t */",
"\tResultColumnList getRCLForInsert(InsertNode target, int[] colMap)"
],
"header": "@@ -916,24 +916,42 @@ public abstract class ResultSetNode extends QueryTreeNode",
"removed": [
"\t * we will reorder and/or add defaults to the current RCL so that is",
"\t * @param numTargetColumns\t# of columns in target RCL",
"\t * @param dataDictionary\tDataDictionary to use",
"\t * @param targetTD\t\t\tTableDescriptor for target if the target is not a VTI, null if a VTI",
" * @param targetVTI Target description if it is a VTI, null if not a VTI",
"\tpublic void enhanceRCLForInsert(int numTargetColumns, int[] colMap, ",
"\t\t\t\t\t\t\t\t\t\t\t DataDictionary dataDictionary,",
"\t\t\t\t\t\t\t\t\t\t\t TableDescriptor targetTD,",
" FromVTI targetVTI)"
]
},
{
"added": [
"\t\tint numTargetColumns = target.resultColumnList.size();"
],
"header": "@@ -946,10 +964,10 @@ public abstract class ResultSetNode extends QueryTreeNode",
"removed": [
"\t\t\tColumnReference newColumnReference;"
]
},
{
"added": [
"\t\t\t\tnewResultColumn = genNewRCForInsert(",
"\t\t\t\t\t\ttarget.targetTableDescriptor,",
"\t\t\t\t\t\ttarget.targetVTI,",
"\t\t\t\t\t\tindex + 1,",
"\t\t\t\t\t\ttarget.getDataDictionary());",
"\t\treturn newResultCols;"
],
"header": "@@ -958,14 +976,17 @@ public abstract class ResultSetNode extends QueryTreeNode",
"removed": [
"\t\t\t\tnewResultColumn = genNewRCForInsert(targetTD, targetVTI, index + 1, dataDictionary);",
"\t\t/* Set the source RCL to the massaged version */",
"\t\tresultColumns = newResultCols;"
]
}
]
}
] |
derby-DERBY-4442-e0ba78cc
|
DERBY-4413 INSERT from SELECT DISTINCT gives assertFailure (sane), or NPE (insane) in presence of generated columns
After DERBY-4442 went in, the exception to the ASSERT check we made in
the original fix for this issue should be rolled back, so as to
provide a better internal consistency check. Patch derby-4413-rollback
does this.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@885659 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/store/access/sort/MergeSort.java",
"hunks": [
{
"added": [],
"header": "@@ -126,12 +126,6 @@ class MergeSort implements Sort",
"removed": [
"",
"\t/**",
" Determine whether a column is used for ordering or not.",
"\t**/",
"\tprivate boolean isOrderingColumn[];",
""
]
},
{
"added": [
"\t\t\t\t\tSanityManager.THROWASSERT(",
"\t\t\t\t\t\t\"col[\" + colid + \"] is null\");"
],
"header": "@@ -461,18 +455,8 @@ class MergeSort implements Sort",
"removed": [
"\t\t\t\t\tif (!isOrderingColumn[colid]) {",
"",
"\t\t\t\t\t\t// DERBY-4413 shows that the value for a generated",
"\t\t\t\t\t\t// column will be null as the result set is computed as",
"\t\t\t\t\t\t// part of an INSERT INTO, so accept a null also.",
"\t\t\t\t\t\t// This column would not be part of the sort key.",
"",
"\t\t\t\t\t\tcontinue;",
"",
"\t\t\t\t\t} else {",
"\t\t\t\t\t\tSanityManager.THROWASSERT(\"col[\" + colid + \"] is null\");",
"\t\t\t\t\t}"
]
},
{
"added": [
" for (int i = 0; i < columnOrdering.length; i++)"
],
"header": "@@ -562,27 +546,13 @@ class MergeSort implements Sort",
"removed": [
"",
"\t\tif (SanityManager.DEBUG) {",
"\t\t\tisOrderingColumn = new boolean[template.length];",
"",
"\t\t\tfor (int i = 0; i < isOrderingColumn.length; i++) {",
"\t\t\t\tisOrderingColumn[i] = false;",
"\t\t\t}",
"\t\t}",
"",
"\t\tfor (int i = 0; i < columnOrdering.length; i++)",
"",
"\t\t\tif (SanityManager.DEBUG) {",
"\t\t\t\tisOrderingColumn[columnOrderingMap[i]] = true;",
"\t\t\t}",
""
]
}
]
}
] |
derby-DERBY-4443-7eebc809
|
DERBY-4443: Wrap rollback in exception handlers in try-catch
This patch was contributed by Houx Zhang (houxzhang at gmail dot com)
The patch modifies the error handling in SystemProcedures.java so that
secondary exceptions during rollback are chained to the primary exception.
Also, EmbedSQLException's wrapping behavior is adjusted so that the
StandardException logic doesn't try to unwrap the chained exceptions.
A new test is added to the tools suite; it simulates rollback errors
using a special mock driver and mock connection, and verifies that
the exceptions are chained properly.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1096991 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/catalog/SystemProcedures.java",
"hunks": [
{
"added": [
"\t\t\trollBackAndThrowSQLException(conn, se);",
"\t",
" /**",
" * issue a rollback when SQLException se occurs. If SQLException ouccurs when rollback,",
" * the new SQLException will be added into the chain of se. ",
" */",
" private static void rollBackAndThrowSQLException(Connection conn,",
" SQLException se) throws SQLException {",
" try {",
" conn.rollback();",
" } catch (SQLException e) {",
" se.setNextException(e);",
" }",
" throw se;",
" }"
],
"header": "@@ -1466,14 +1466,25 @@ public class SystemProcedures {",
"removed": [
"\t\t\t//issue a rollback on any errors",
"\t\t\tconn.rollback();",
"\t\t\tthrow se;",
""
]
},
{
"added": [
" rollBackAndThrowSQLException(conn, se);"
],
"header": "@@ -1507,9 +1518,7 @@ public class SystemProcedures {",
"removed": [
" //issue a rollback on any errors",
" conn.rollback();",
" throw se;"
]
},
{
"added": [
"\t\t rollBackAndThrowSQLException(conn, se);"
],
"header": "@@ -1550,9 +1559,7 @@ public class SystemProcedures {",
"removed": [
"\t\t\t//issue a rollback on any errors",
"\t\t\tconn.rollback();",
"\t\t\tthrow se;"
]
}
]
}
] |
derby-DERBY-4448-47611b15
|
DERBY-4451 ArrayIndexOutOfBoundsException or ASSERT FAILED when inserting generated columns out of order
This patch fixes this issue as well as DERBY-4448, since they share
the same underlying problem: the former way of checking for illegal
override of generated columns in the presence of an explicit target
column list failed to look below the top level UnionNode in a table
constructor.
This specialized treatment for the case of an explicit target column
list has been removed for INSERT (it was shared with logic for
UPDATE), so checking is now done by the code for the case of no
explicit targte column list.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@884163 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/UpdateNode.java",
"hunks": [
{
"added": [
" forbidGenerationOverrides( resultSet.getResultColumns(),",
"\t\t\t\t\t\t\t\t addedGeneratedColumns );"
],
"header": "@@ -374,7 +374,8 @@ public final class UpdateNode extends DMLModStatementNode",
"removed": [
" forbidGenerationOverrides( resultSet.getResultColumns(), true, addedGeneratedColumns );"
]
}
]
}
] |
derby-DERBY-4449-afbf89dc
|
DERBY-4449: ArrayIndexOutOfBoundsException when inserting DEFAULT into
unspecified column
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@951366 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4450-dcd2b043
|
DERBY-4450 GROUP BY in an IN-subquery inside HAVING clause whose select list is subset of group by columns, gives NPE
Patch derby-4450b + Knut's simplification of the autocommit call in GrooupByTest#testDerby4450.
This solves the problem seen in this issue, which was a regression
from DERBY-681. The crux of the problem is that a PRN is added in the
result set tree without adjusting a higher level reference so that
wrong code is generated. The solution here is to reuse the result
column list in the inserted PRN, so that reference from above will
point correctly even after the PRN insertion (more details in JIRA).
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@882732 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4451-47611b15
|
DERBY-4451 ArrayIndexOutOfBoundsException or ASSERT FAILED when inserting generated columns out of order
This patch fixes this issue as well as DERBY-4448, since they share
the same underlying problem: the former way of checking for illegal
override of generated columns in the presence of an explicit target
column list failed to look below the top level UnionNode in a table
constructor.
This specialized treatment for the case of an explicit target column
list has been removed for INSERT (it was shared with logic for
UPDATE), so checking is now done by the code for the case of no
explicit targte column list.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@884163 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/UpdateNode.java",
"hunks": [
{
"added": [
" forbidGenerationOverrides( resultSet.getResultColumns(),",
"\t\t\t\t\t\t\t\t addedGeneratedColumns );"
],
"header": "@@ -374,7 +374,8 @@ public final class UpdateNode extends DMLModStatementNode",
"removed": [
" forbidGenerationOverrides( resultSet.getResultColumns(), true, addedGeneratedColumns );"
]
}
]
}
] |
derby-DERBY-4455-992f56f1
|
DERBY-4455: Prepared statement failure with CLOB: Stream has already been read and end-of-file reached and cannot be re-used.
Don't materialize streams when transferring data values from one statement to another.
The code causing the bug was/is only invoked when using XA.
Patch file: derby-4455-1c.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@901760 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/GenericParameterValueSet.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.error.StandardException;",
"import org.apache.derby.iapi.services.sanity.SanityManager;",
"import org.apache.derby.iapi.reference.JDBC30Translation;",
"import org.apache.derby.iapi.reference.SQLState;",
"import org.apache.derby.iapi.types.SQLBit;",
"import org.apache.derby.iapi.types.SQLBlob;",
"import org.apache.derby.iapi.types.SQLChar;"
],
"header": "@@ -21,29 +21,21 @@",
"removed": [
"import org.apache.derby.iapi.services.loader.ClassFactory;",
"import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;",
"",
"",
"import org.apache.derby.iapi.types.DataValueFactory;",
"",
"import org.apache.derby.iapi.reference.SQLState;",
"",
"import org.apache.derby.iapi.error.StandardException;",
"",
"import org.apache.derby.iapi.services.sanity.SanityManager;",
"import java.sql.Date;",
"import java.sql.Time;",
"import java.sql.Timestamp;",
"import org.apache.derby.iapi.reference.JDBC30Translation;"
]
},
{
"added": [
" DataValueDescriptor dvd = oldp.getValue();",
" InputStream is = null;",
" // See if the value type can hold a stream.",
" // SQLBinary isn't public, check for both SQLBlob and SQLBit.",
" if (dvd instanceof SQLChar || dvd instanceof SQLBlob ||",
" dvd instanceof SQLBit) {",
" is = dvd.getStream();",
" }",
" if (is != null) {",
" // DERBY-4455: Don't materialize the stream when",
" // transferring it. If the stream has been drained already,",
" // and the user doesn't set a new value before executing",
" // the prepared statement again, Derby will fail.",
" pvstarget.getParameterForSet(i).setValue(is,",
" DataValueDescriptor.UNKNOWN_LOGICAL_LENGTH);",
" } else {",
" pvstarget.getParameterForSet(i).setValue(dvd);",
" }"
],
"header": "@@ -269,7 +261,24 @@ final class GenericParameterValueSet implements ParameterValueSet",
"removed": [
"\t\t\t\tpvstarget.getParameterForSet(i).setValue(oldp.getValue());"
]
}
]
}
] |
derby-DERBY-4457-646eea3c
|
DERBY-4457: 'Column value mismatch' in 'testDistinctInsertWithGeneratedColumn(...lang.DistinctTest)' on Jvm 1.5, 1.4, phoneME.
SELECT DISTINCT may return rows in different order on different JVMs. Made the test independent of the actual ordering.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@885726 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4459-aec2537d
|
DERBY-4459: Eliminate optimization which caused verification error in nested function calls.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@964039 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-446-2fe796b2
|
DERBY-446 Remove propertyKey_ constants from ClientbaseDataSOurce that were in place for
the old scheme to get the list of attributes.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@393003 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/client/org/apache/derby/client/am/LogWriter.java",
"hunks": [
{
"added": [
"import java.util.Enumeration;",
"import java.util.Properties;",
"",
"import javax.naming.NamingException;",
"import javax.naming.RefAddr;",
"import javax.naming.Reference;",
"import org.apache.derby.shared.common.reference.Attribute;"
],
"header": "@@ -21,8 +21,15 @@",
"removed": []
},
{
"added": [
" getProperties(dataSource));"
],
"header": "@@ -1046,7 +1053,7 @@ public class LogWriter {",
"removed": [
" dataSource.getProperties());"
]
},
{
"added": [
" getProperties(dataSource));"
],
"header": "@@ -1060,7 +1067,7 @@ public class LogWriter {",
"removed": [
" dataSource.getProperties());"
]
}
]
},
{
"file": "java/client/org/apache/derby/jdbc/ClientBaseDataSource.java",
"hunks": [
{
"added": [
"import java.util.Enumeration;"
],
"header": "@@ -22,6 +22,7 @@ package org.apache.derby.jdbc;",
"removed": []
},
{
"added": [
"",
"import javax.naming.RefAddr;"
],
"header": "@@ -29,6 +30,8 @@ import java.lang.reflect.Field;",
"removed": []
},
{
"added": [
" private int loginTimeout;"
],
"header": "@@ -101,9 +104,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" private int loginTimeout = propertyDefault_loginTimeout;",
" public final static String propertyKey_loginTimeout = \"loginTimeout\";",
" public static final int propertyDefault_loginTimeout = 0;"
]
},
{
"added": [
" ",
" "
],
"header": "@@ -138,16 +139,14 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_databaseName = \"databaseName\";",
"",
" public final static String propertyKey_description = \"description\";",
""
]
},
{
"added": [
" ",
" ",
" "
],
"header": "@@ -155,21 +154,18 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_dataSourceName = \"dataSourceName\";",
"",
" public final static String propertyKey_portNumber = \"portNumber\";",
"",
" public final static String propertyKey_serverName = \"serverName\";",
""
]
},
{
"added": [],
"header": "@@ -185,7 +181,6 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_user = \"user\";"
]
},
{
"added": [],
"header": "@@ -238,8 +233,6 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_securityMechanism = \"securityMechanism\";",
""
]
},
{
"added": [],
"header": "@@ -310,8 +303,6 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_retrieveMessageText = \"retrieveMessageText\";",
""
]
},
{
"added": [
" "
],
"header": "@@ -321,8 +312,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_traceFile = \"traceFile\";",
""
]
},
{
"added": [
" "
],
"header": "@@ -332,8 +322,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_traceDirectory = \"traceDirectory\";",
""
]
},
{
"added": [
" "
],
"header": "@@ -342,8 +331,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_traceFileAppend = \"traceFileAppend\";",
""
]
},
{
"added": [],
"header": "@@ -354,7 +342,6 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_password = \"password\";"
]
},
{
"added": [],
"header": "@@ -456,63 +443,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" /**",
" * Not an external. Do not document in pubs. Returns all non-transient properties of a ClientBaseDataSource.",
" */",
" public Properties getProperties() throws SqlException {",
" Properties properties = new Properties();",
"",
" Class clz = getClass();",
" Field[] fields = clz.getFields();",
" for (int i = 0; i < fields.length; i++) {",
" String name = fields[i].getName();",
" if (name.startsWith(\"propertyKey_\")) {",
" if (Modifier.isTransient(fields[i].getModifiers())) {",
" continue; // if it is transient, then skip this propertyKey.",
" }",
" try {",
" String propertyKey = fields[i].get(this).toString();",
" // search for property field.",
" Field propertyField;",
" clz = getClass(); // start from current class.",
" while (true) {",
" try {",
" propertyField = clz.getDeclaredField(name.substring(12));",
" break; // found the property field, so break the while loop.",
" } catch (NoSuchFieldException nsfe) {",
" // property field is not found at current level of class, so continue to super class.",
" clz = clz.getSuperclass();",
" if (clz == Object.class) {",
" throw new SqlException(new LogWriter(logWriter, traceLevel), \"bug check: corresponding property field does not exist\");",
" }",
" continue;",
" }",
" }",
"",
" if (!Modifier.isTransient(propertyField.getModifiers())) {",
" // if the property is not transient:",
" // get the property.",
" propertyField.setAccessible(true);",
" Object propertyObj = propertyField.get(this);",
" String property = String.valueOf(propertyObj); // don't use toString becuase it may be null.",
" if (\"password\".equals(propertyKey)) {",
" StringBuffer sb = new StringBuffer(property);",
" for (int j = 0; j < property.length(); j++) {",
" sb.setCharAt(j, '*');",
" }",
" property = sb.toString();",
" }",
" // add into prperties.",
" properties.setProperty(propertyKey, property);",
" }",
" } catch (IllegalAccessException e) {",
" throw new SqlException(new LogWriter(this.logWriter, this.traceLevel), \"bug check: property cannot be accessed\");",
" }",
" }",
" }",
" return properties;",
" }"
]
},
{
"added": [],
"header": "@@ -835,7 +766,6 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static String propertyKey_connectionAttributes = \"connectionAttributes\";"
]
}
]
},
{
"file": "java/shared/org/apache/derby/shared/common/reference/Attribute.java",
"hunks": [
{
"added": [
" /**",
" * traceLevel.",
" * Client driver attribute.",
" */",
" String CLIENT_TRACE_LEVEL = \"traceLevel\";",
" "
],
"header": "@@ -200,6 +200,12 @@ public interface Attribute {",
"removed": []
}
]
}
] |
derby-DERBY-446-62d2020e
|
DERBY-446 (partial) Make ClientDataSource use public setter and getter methods as standard
for its Java bean properties. Avoids security exceptions or requiring extreme security
permissions in order to create a Reference from the data source. Staged development, subsequent
commits will clean up some of the remants of the old code, by removing or moving the "propertyKey_*" fields.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@379536 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/client/org/apache/derby/client/ClientDataSourceFactory.java",
"hunks": [
{
"added": [
"import java.lang.reflect.Method;",
"import java.util.Enumeration;",
"",
"import javax.naming.RefAddr;",
"import javax.naming.Reference;",
""
],
"header": "@@ -20,6 +20,12 @@",
"removed": []
}
]
},
{
"file": "java/client/org/apache/derby/jdbc/ClientBaseDataSource.java",
"hunks": [
{
"added": [
"import java.lang.reflect.InvocationTargetException;",
"import java.lang.reflect.Method;"
],
"header": "@@ -26,20 +26,17 @@ import java.util.Properties;",
"removed": [
"import java.security.AccessController;",
"import java.security.PrivilegedActionException;",
"import java.sql.SQLException;",
"import javax.naming.RefAddr;",
"import org.apache.derby.client.am.SetAccessibleAction;"
]
},
{
"added": [
" "
],
"header": "@@ -50,7 +47,7 @@ import org.apache.derby.client.ClientDataSourceFactory;",
"removed": [
""
]
},
{
"added": [
" private int loginTimeout = propertyDefault_loginTimeout;"
],
"header": "@@ -72,7 +69,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected int loginTimeout = propertyDefault_loginTimeout;"
]
},
{
"added": [
" private String databaseName;"
],
"header": "@@ -108,7 +105,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String databaseName;"
]
},
{
"added": [
" private String description;"
],
"header": "@@ -116,7 +113,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String description;"
]
},
{
"added": [
" private String dataSourceName;",
" private int portNumber = propertyDefault_portNumber;",
" private String serverName = propertyDefault_serverName;"
],
"header": "@@ -125,19 +122,19 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String dataSourceName;",
" protected int portNumber = propertyDefault_portNumber;",
" protected String serverName = propertyDefault_serverName;"
]
},
{
"added": [
" private String user = propertyDefault_user;"
],
"header": "@@ -155,7 +152,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String user = propertyDefault_user;"
]
},
{
"added": [
" private boolean retrieveMessageText = propertyDefault_retrieveMessageText;"
],
"header": "@@ -224,7 +221,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected boolean retrieveMessageText = propertyDefault_retrieveMessageText;"
]
},
{
"added": [
" private String traceFile;"
],
"header": "@@ -236,7 +233,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String traceFile;"
]
},
{
"added": [
" private String traceDirectory;"
],
"header": "@@ -247,7 +244,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String traceDirectory;"
]
},
{
"added": [
" private boolean traceFileAppend = propertyDefault_traceFileAppend;"
],
"header": "@@ -256,7 +253,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected boolean traceFileAppend = propertyDefault_traceFileAppend;"
]
},
{
"added": [
" private String password;"
],
"header": "@@ -276,7 +273,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" protected String password;"
]
}
]
}
] |
derby-DERBY-446-ceb72fea
|
DERBY-446 (partial) Copy Attribute.java to the java/shared reference area and add in constants
for the client JDBC attributes. Change the client code to use these constants as preparation to
removing these constants from the user visible classes.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@388687 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/client/org/apache/derby/jdbc/ClientBaseDataSource.java",
"hunks": [
{
"added": [
"import org.apache.derby.shared.common.reference.Attribute;"
],
"header": "@@ -41,6 +41,7 @@ import org.apache.derby.client.am.Connection;",
"removed": []
},
{
"added": [
" String userString = properties.getProperty(Attribute.USERNAME_ATTR);"
],
"header": "@@ -188,7 +189,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" String userString = properties.getProperty(propertyKey_user);"
]
},
{
"added": [
" String securityMechanismString = properties.getProperty(Attribute.CLIENT_SECURITY_MECHANISM);"
],
"header": "@@ -251,7 +252,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" String securityMechanismString = properties.getProperty(propertyKey_securityMechanism);"
]
},
{
"added": [
" String passwordString = properties.getProperty(Attribute.PASSWORD_ATTR);"
],
"header": "@@ -265,7 +266,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" String passwordString = properties.getProperty(propertyKey_password);"
]
},
{
"added": [
" String retrieveMessageTextString = properties.getProperty(Attribute.CLIENT_RETIEVE_MESSAGE_TEXT);"
],
"header": "@@ -313,7 +314,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" String retrieveMessageTextString = properties.getProperty(propertyKey_retrieveMessageText);"
]
},
{
"added": [
" return properties.getProperty(Attribute.CLIENT_TRACE_FILE);"
],
"header": "@@ -323,7 +324,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" return properties.getProperty(propertyKey_traceFile);"
]
},
{
"added": [
" return properties.getProperty(Attribute.CLIENT_TRACE_DIRECTORY);"
],
"header": "@@ -334,7 +335,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" return properties.getProperty(propertyKey_traceDirectory);"
]
},
{
"added": [
" String traceFileAppendString = properties.getProperty(Attribute.CLIENT_TRACE_APPEND);"
],
"header": "@@ -344,7 +345,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" String traceFileAppendString = properties.getProperty(propertyKey_traceFileAppend);"
]
},
{
"added": [
" checkBoolean(augmentedProperties, Attribute.CLIENT_RETIEVE_MESSAGE_TEXT);"
],
"header": "@@ -670,7 +671,7 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" checkBoolean(augmentedProperties, propertyKey_retrieveMessageText);"
]
}
]
},
{
"file": "java/client/org/apache/derby/jdbc/ClientDriver.java",
"hunks": [
{
"added": [
"import org.apache.derby.shared.common.reference.Attribute;"
],
"header": "@@ -31,6 +31,7 @@ import org.apache.derby.client.am.Version;",
"removed": []
},
{
"added": [
"\t\t\tif (key.equals(Attribute.USERNAME_ATTR) || ",
"\t\t\t\tkey.equals(Attribute.PASSWORD_ATTR))"
],
"header": "@@ -164,8 +165,8 @@ public class ClientDriver implements java.sql.Driver {",
"removed": [
"\t\t\tif (key.equals(ClientDataSource.propertyKey_user) || ",
"\t\t\t\tkey.equals(ClientDataSource.propertyKey_password))"
]
}
]
},
{
"file": "java/shared/org/apache/derby/shared/common/reference/Attribute.java",
"hunks": [
{
"added": [
"/*",
"",
" Derby - Class org.apache.derby.shared.common.reference.Attribute",
"",
" Copyright 1999, 2006 The Apache Software Foundation or its licensors, as applicable.",
"",
" Licensed under the Apache License, Version 2.0 (the \"License\");",
" you may not use this file except in compliance with the License.",
" You may obtain a copy of the License at",
"",
" http://www.apache.org/licenses/LICENSE-2.0",
"",
" Unless required by applicable law or agreed to in writing, software",
" distributed under the License is distributed on an \"AS IS\" BASIS,",
" WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
" See the License for the specific language governing permissions and",
" limitations under the License.",
"",
" */",
"",
"package org.apache.derby.shared.common.reference;",
"",
"/**",
" * List of all connection (JDBC) attributes by the system.",
" * ",
" * ",
" * <P>",
" * This class exists for two reasons",
" * <Ol>",
" * <LI> To act as the internal documentation for the attributes.",
" * <LI> To remove the need to declare a java static field for the attributes",
" * name in the protocol/implementation class. This reduces the footprint as the",
" * string is final and thus can be included simply as a String constant pool",
" * entry.",
" * </OL>",
" * <P>",
" * This class should not be shipped with the product.",
" * ",
" * <P>",
" * This class has no methods, all it contains are String's which by are public,",
" * static and final since they are declared in an interface.",
" */",
"",
"public interface Attribute {",
"",
" /**",
" * Not an attribute but the root for the JDBC URL that Derby supports.",
" */",
" String PROTOCOL = \"jdbc:derby:\";",
"",
" /**",
" * The SQLJ protocol for getting the default connection for server side jdbc",
" */",
" String SQLJ_NESTED = \"jdbc:default:connection\";",
"",
" // Network Protocols. These need to be rejected by the embedded driver.",
"",
" /**",
" * The protocol for Derby Network Client",
" */",
" String DNC_PROTOCOL = \"jdbc:derby://\";",
"",
" /**",
" * The protocol for the IBM Universal JDBC Driver",
" * ",
" */",
" String JCC_PROTOCOL = \"jdbc:derby:net:\";",
"",
" /**",
" * Attribute name to encrypt the database on disk. If set to true, all user",
" * data is stored encrypted on disk.",
" */",
" String DATA_ENCRYPTION = \"dataEncryption\";",
"",
" /**",
" * If dataEncryption is true, use this attribute to pass in the secret key.",
" * The secret key must be at least 8 characters long. This key must not be",
" * stored persistently in cleartext anywhere.",
" */",
"",
" String BOOT_PASSWORD = \"bootPassword\";",
"",
" /**",
" * The attribute that is used for the database name, from the JDBC notion of",
" * jdbc:<subprotocol>:<subname>",
" */",
" String DBNAME_ATTR = \"databaseName\";",
"",
" /**",
" * The attribute that is used to request a shutdown.",
" */",
" String SHUTDOWN_ATTR = \"shutdown\";",
"",
" /**",
" * The attribute that is used to request a database create.",
" */",
" String CREATE_ATTR = \"create\";",
"",
" /**",
" * The attribute that is used to set the user name.",
" */",
" String USERNAME_ATTR = \"user\";",
"",
" /**",
" * The attribute that is used to set the user password.",
" */",
" String PASSWORD_ATTR = \"password\";",
"",
" /**",
" * The attribute that is used to set the connection's DRDA ID.",
" */",
" String DRDAID_ATTR = \"drdaID\";",
"",
" /**",
" * The attribute that is used to allow upgrade.",
" */",
" String UPGRADE_ATTR = \"upgrade\";",
"",
" /**",
" * Put the log on a different device.",
" */",
" String LOG_DEVICE = \"logDevice\";",
"",
" /**",
" * Set the territory for the database.",
" */",
" String TERRITORY = \"territory\";",
"",
" /**",
" * Set the collation sequence of the database, currently on IDENTITY will be",
" * supported (strings will sort according to binary comparison).",
" */",
" String COLLATE = \"collate\";",
"",
" /**",
" * Attribute for encrypting a database. Specifies the cryptographic services",
" * provider.",
" */",
" String CRYPTO_PROVIDER = \"encryptionProvider\";",
"",
" /**",
" * Attribute for encrypting a database. Specifies the cryptographic",
" * algorithm.",
" */",
" String CRYPTO_ALGORITHM = \"encryptionAlgorithm\";",
"",
" /**",
" * Attribute for encrypting a database. Specifies the key length in bytes",
" * for the specified cryptographic algorithm.",
" */",
" String CRYPTO_KEY_LENGTH = \"encryptionKeyLength\";",
"",
" /**",
" * Attribute for encrypting a database. Specifies the actual key. When this",
" * is specified all the supplied crypto information is stored external to",
" * the database, ie by the application.",
" */",
" String CRYPTO_EXTERNAL_KEY = \"encryptionKey\";",
"",
" /**",
" * This attribute is used to request to create a database from backup. This",
" * will throw error if a database with same already exists at the location",
" * where we tring to create.",
" */",
" String CREATE_FROM = \"createFrom\";",
"",
" /**",
" * This attribute is used to request a database restore from backup. It must",
" * be used only when the active database is corrupted, because it will",
" * cleanup the existing database and replace it from the backup.",
" */",
" String RESTORE_FROM = \"restoreFrom\";",
"",
" /**",
" * The attribute that is used to request a roll-forward recovery of the",
" * database.",
" */",
" String ROLL_FORWARD_RECOVERY_FROM = \"rollForwardRecoveryFrom\";",
"",
" /**",
" * securityMechanism sets the mechanism for transmitting the user name and",
" * password from the client. Client driver attribute.",
" */",
" String CLIENT_SECURITY_MECHANISM = \"securityMechanism\";",
"",
" /**",
" * traceFile sets the client side trace file. Client driver attribute.",
" */",
" String CLIENT_TRACE_FILE = \"traceFile\";",
"",
" /**",
" * traceDirectory sets the client side trace directory.",
" * Client driver attribute.",
" */",
" String CLIENT_TRACE_DIRECTORY = \"traceDirectory\";",
" ",
" /**",
" * traceFileAppend.",
" * Client driver attribute.",
" */",
" String CLIENT_TRACE_APPEND = \"traceFileAppend\";",
" ",
" /**",
" * retrieveMessageText.",
" * Client driver attribute.",
" */ ",
" String CLIENT_RETIEVE_MESSAGE_TEXT = \"retrieveMessageText\";",
"}",
"",
""
],
"header": "@@ -0,0 +1,210 @@",
"removed": []
}
]
}
] |
derby-DERBY-446-f86608b1
|
DERBY-446 Remove holdability constants from ClientBaseDataSource and instead use
the identical values in JDBC30Translation.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@379565 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/client/org/apache/derby/client/am/DatabaseMetaData.java",
"hunks": [
{
"added": [
"import org.apache.derby.shared.common.reference.JDBC30Translation;"
],
"header": "@@ -24,7 +24,7 @@ import java.sql.SQLException;",
"removed": [
""
]
},
{
"added": [
" if (connection_.resultSetHoldability_ == JDBC30Translation.HOLD_CURSORS_OVER_COMMIT) {"
],
"header": "@@ -1282,7 +1282,7 @@ public abstract class DatabaseMetaData implements java.sql.DatabaseMetaData {",
"removed": [
" if (connection_.resultSetHoldability_ == ClientDataSource.HOLD_CURSORS_OVER_COMMIT) {"
]
},
{
"added": [
" if (connection_.resultSetHoldability_ == JDBC30Translation.HOLD_CURSORS_OVER_COMMIT) {"
],
"header": "@@ -1629,7 +1629,7 @@ public abstract class DatabaseMetaData implements java.sql.DatabaseMetaData {",
"removed": [
" if (connection_.resultSetHoldability_ == ClientDataSource.HOLD_CURSORS_OVER_COMMIT) {"
]
},
{
"added": [
" if (connection_.resultSetHoldability_ == JDBC30Translation.HOLD_CURSORS_OVER_COMMIT) {"
],
"header": "@@ -1682,7 +1682,7 @@ public abstract class DatabaseMetaData implements java.sql.DatabaseMetaData {",
"removed": [
" if (connection_.resultSetHoldability_ == ClientDataSource.HOLD_CURSORS_OVER_COMMIT) {"
]
}
]
},
{
"file": "java/client/org/apache/derby/client/am/SectionManager.java",
"hunks": [
{
"added": [
"import org.apache.derby.shared.common.reference.JDBC30Translation;",
""
],
"header": "@@ -20,6 +20,8 @@",
"removed": []
},
{
"added": [
" if (resultSetHoldability == JDBC30Translation.HOLD_CURSORS_OVER_COMMIT) {",
" } else if (resultSetHoldability == JDBC30Translation.CLOSE_CURSORS_AT_COMMIT) {"
],
"header": "@@ -96,9 +98,9 @@ public class SectionManager {",
"removed": [
" if (resultSetHoldability == org.apache.derby.jdbc.ClientDataSource.HOLD_CURSORS_OVER_COMMIT) {",
" } else if (resultSetHoldability == org.apache.derby.jdbc.ClientDataSource.CLOSE_CURSORS_AT_COMMIT) {"
]
},
{
"added": [
" if (resultSetHoldability == JDBC30Translation.HOLD_CURSORS_OVER_COMMIT) {",
" } else if (resultSetHoldability == JDBC30Translation.CLOSE_CURSORS_AT_COMMIT) {"
],
"header": "@@ -109,9 +111,9 @@ public class SectionManager {",
"removed": [
" if (resultSetHoldability == org.apache.derby.jdbc.ClientDataSource.HOLD_CURSORS_OVER_COMMIT) {",
" } else if (resultSetHoldability == org.apache.derby.jdbc.ClientDataSource.CLOSE_CURSORS_AT_COMMIT) {"
]
}
]
},
{
"file": "java/client/org/apache/derby/client/am/Statement.java",
"hunks": [
{
"added": [
"import org.apache.derby.shared.common.reference.JDBC30Translation;",
""
],
"header": "@@ -21,6 +21,8 @@ package org.apache.derby.client.am;",
"removed": []
}
]
},
{
"file": "java/client/org/apache/derby/client/net/NetDatabaseMetaData.java",
"hunks": [
{
"added": [
"import org.apache.derby.shared.common.reference.JDBC30Translation;"
],
"header": "@@ -22,6 +22,7 @@ package org.apache.derby.client.net;",
"removed": []
}
]
},
{
"file": "java/client/org/apache/derby/client/net/NetStatementReply.java",
"hunks": [
{
"added": [
"import org.apache.derby.shared.common.reference.JDBC30Translation;"
],
"header": "@@ -30,6 +30,7 @@ import org.apache.derby.client.am.Statement;",
"removed": []
}
]
},
{
"file": "java/client/org/apache/derby/jdbc/ClientBaseDataSource.java",
"hunks": [
{
"added": [],
"header": "@@ -161,10 +161,6 @@ public abstract class ClientBaseDataSource implements Serializable, Referenceabl",
"removed": [
" public final static int HOLD_CURSORS_OVER_COMMIT = 1; // this matches jdbc 3 ResultSet.HOLD_CURSORS_OVER_COMMIT",
" public final static int CLOSE_CURSORS_AT_COMMIT = 2; // this matches jdbc 3 ResultSet.CLOSE_CURSORS_AT_COMMIT",
"",
""
]
}
]
}
] |
derby-DERBY-4463-14ff5da5
|
DERBY-4463 JMX test in nightly test suite failed with: JMXTest:clientjava.lang.InterruptedException:
Patch derby-4463-2, which now resets the interrupt flag for the server
threads also before the exception is propagated in the test. Followup
to derby-4463 patch.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@889875 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4463-75bf63db
|
DERBY-4463; JMX test in nightly test suite failed with: JMXTest:clientjava.lang.InterruptedException
Causing the LockInterruptTest to be skipped with ibm 1.5 jvm unil SR13 is
available; this test caused a jvm bug in the handling of waitFor() to pop up.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1071640 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4463-7f3c16bc
|
DERBY-4463 JMX test in nightly test suite failed with: JMXTest:clientjava.lang.InterruptedException:
Patch derby-4463.diff, which moves the clearing of the interrupted
flag in Derby151Test to the teardown method of the test so it will
always be performed.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@888881 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4463-f24f53cb
|
DERBY-4463; JMX test in nightly test suite failed with: JMXTest:clientjava.lang.InterruptedException
DERBY-5028; InterruptResilienceTest passes with IBM 1.6 SR9 but creates javacore dumps
Adjusted the skipping of this test with ibm jvms to only skip with 1.5;
Added setting of derby.stream.error.extendedDiagSeverityLevel to 50000 to
prevent unnecessary javacore dump files.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1071545 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4463-fc9859eb
|
DERBY-4463 JMX test in nightly test suite failed with: JMXTest:clientjava.lang.InterruptedException
Patch derby-4463-except-non-sun-vm disables this test for non-Sun
VMs. This is intended as a temporary measure until we understand why
the test is not working correctly on IBM VMs.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@892316 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4469-30fa79be
|
DERBY-4469: Forbid implicit and explicit casts to and from UDTs.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@909190 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/BaseTypeCompiler.java",
"hunks": [
{
"added": [
" if ( otherType.getBaseTypeId().isAnsiUDT() ) { return false; }"
],
"header": "@@ -274,6 +274,7 @@ abstract class BaseTypeCompiler implements TypeCompiler",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/JavaToSQLValueNode.java",
"hunks": [
{
"added": [
"\t\tDataTypeDescriptor dts = javaNode.getDataType();"
],
"header": "@@ -246,7 +246,7 @@ public class JavaToSQLValueNode extends ValueNode",
"removed": [
"\t\tDataTypeDescriptor dts = DataTypeDescriptor.getSQLDataTypeDescriptor(javaNode.getJavaTypeName());"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/UserDefinedTypeCompiler.java",
"hunks": [
{
"added": [
"import org.apache.derby.catalog.types.UserDefinedTypeIdImpl;"
],
"header": "@@ -30,6 +30,7 @@ import org.apache.derby.iapi.types.DataValueFactory;",
"removed": []
},
{
"added": [
"\t * Right now, casting is not allowed from one user defined type",
" * to another."
],
"header": "@@ -42,11 +43,8 @@ public class UserDefinedTypeCompiler extends BaseTypeCompiler",
"removed": [
"\t * User types are convertible to other user types only if",
"\t * (for now) they are the same type and are being used to",
"\t * implement some JDBC type. This is sufficient for",
"\t * date/time types; it may be generalized later for e.g.",
"\t * comparison of any user type with one of its subtypes."
]
},
{
"added": [
" if ( getTypeId().getBaseTypeId().isAnsiUDT() )",
" {",
" if ( !otherType.getBaseTypeId().isAnsiUDT() ) { return false; }",
" ",
" UserDefinedTypeIdImpl thisTypeID = (UserDefinedTypeIdImpl) getTypeId().getBaseTypeId();",
" UserDefinedTypeIdImpl thatTypeID = (UserDefinedTypeIdImpl) otherType.getBaseTypeId();",
" ",
" return thisTypeID.getSQLTypeName().equals( thatTypeID.getSQLTypeName() );",
" }",
" ",
"\t\t** We are a non-ANSI user defined type, we are"
],
"header": "@@ -56,8 +54,18 @@ public class UserDefinedTypeCompiler extends BaseTypeCompiler",
"removed": [
"\t\t** We are a user defined type, we are"
]
}
]
}
] |
derby-DERBY-4469-816219fa
|
DERBY-4469: Allow casting of nulls and ? parameters to UDTs.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@889975 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-447-b94443fd
|
DERBY-1184: registerOutParameter(int,int,String) should throw exception
Patch contributed by Kristian Waagan (Kristian.Waagan@Sun.com)
The method 'CallableStatement.registerOutParameter(int,int,String)'
does nothing in the client driver. As stated in DERBY-447, the method throws
a not-implemented exception in the embedded driver. The method should be
changed to do this on the client side as well.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@392304 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4470-f2a56222
|
DERBY-4470: Forbid ordering operations on UDTs.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@908635 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/types/TypeId.java",
"hunks": [
{
"added": [
" private static final TypeId[] ALL_BUILTIN_TYPE_IDS =",
" {",
" BOOLEAN_ID,",
" SMALLINT_ID,",
" INTEGER_ID,",
" CHAR_ID,",
" TINYINT_ID,",
" BIGINT_ID,",
" REAL_ID,",
" DOUBLE_ID,",
" DECIMAL_ID,",
" NUMERIC_ID,",
" VARCHAR_ID,",
" DATE_ID,",
" TIME_ID,",
" TIMESTAMP_ID,",
" BIT_ID,",
" VARBIT_ID,",
" REF_ID,",
" LONGVARCHAR_ID,",
" LONGVARBIT_ID,",
" BLOB_ID,",
" CLOB_ID,",
" XML_ID,",
" };",
""
],
"header": "@@ -267,6 +267,32 @@ public final class TypeId",
"removed": []
},
{
"added": [
"",
" /**",
" * Return all of the builtin type ids.",
" */",
" public static TypeId[] getAllBuiltinTypeIds()",
" {",
" int count = ALL_BUILTIN_TYPE_IDS.length;",
"",
" TypeId[] retval = new TypeId[ count ];",
"",
" for ( int i = 0; i < count; i++ ) { retval[ i ] = ALL_BUILTIN_TYPE_IDS[ i ]; }",
"",
" return retval;",
" }"
],
"header": "@@ -286,6 +312,20 @@ public final class TypeId",
"removed": []
},
{
"added": [
" javaTypeName = \"java.sql.Ref\";"
],
"header": "@@ -795,6 +835,7 @@ public final class TypeId",
"removed": []
},
{
"added": [
" // The following code is disabled until we support",
" // comparable UDTs.",
" return false;",
"",
" // UserDefinedTypeIdImpl baseUserTypeId =",
" // (UserDefinedTypeIdImpl) baseTypeId;",
" //",
" // String className = baseUserTypeId.getClassName();",
" //",
" // try ",
" // {",
" // Class c = cf.getClassInspector().getClass(className);",
" // orderable = java.lang.Comparable.class.isAssignableFrom(c);",
" // } ",
" // catch (ClassNotFoundException cnfe) ",
" // {",
" // orderable = false;",
" // } ",
" // break;"
],
"header": "@@ -1123,24 +1164,28 @@ public final class TypeId",
"removed": [
" UserDefinedTypeIdImpl baseUserTypeId =",
" (UserDefinedTypeIdImpl) baseTypeId;",
"",
" String className = baseUserTypeId.getClassName();",
"",
" try ",
" {",
" Class c = cf.getClassInspector().getClass(className);",
" orderable = java.lang.Comparable.class.isAssignableFrom(c);",
" } ",
" catch (ClassNotFoundException cnfe) ",
" {",
" orderable = false;",
" } ",
" break;"
]
}
]
}
] |
derby-DERBY-4475-b8801c5d
|
DERBY-4475 roleName isn't trimmed as expected
Patch derby-4475, which contains a single line patch contributed by
dbrosius at mebigfatguy dot com, plus some extra test cases (mine).
The patch corrects a glitch in trimming white space around role name
when provided as strings.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@891350 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4476-e96b5a28
|
DERBY-4476: Use helper methods from IdUtil instead of TupleDescriptor.quoteProtectName()
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@890345 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/AliasDescriptor.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.util.IdUtil;"
],
"header": "@@ -40,6 +40,7 @@ import\torg.apache.derby.catalog.DependableFinder;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/TableDescriptor.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.util.IdUtil;"
],
"header": "@@ -41,6 +41,7 @@ import org.apache.derby.iapi.sql.depend.Dependent;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/TupleDescriptor.java",
"hunks": [
{
"added": [],
"header": "@@ -91,40 +91,6 @@ public class TupleDescriptor",
"removed": [
"\t/**",
"\t * If the name has double quotes in it, put two double quotes for every single",
"\t * double quote.",
"\t * Finally put double quotes around string to protect against",
"\t * names with blanks, reserved words being used as identifiers etc.",
"\t * For eg, if table name is m\"n, return it as \"m\"\"n\". For now, this is used",
"\t * by DMLModStatementNode.parseCheckConstraint().",
"\t *",
"\t * Possible improvement: We could possibly analyze string to",
"\t * avoid double quotes in normal cases.",
"\t *",
"\t * @param name\tThe String with or without double quotes",
"\t *",
"\t * @return\tThe quoted String",
"\t */",
"",
"\tpublic String quoteProtectName(String name)",
"\t{",
"\t\tString quotedString = name;",
"\t\tint quotePos = name.indexOf(\"\\\"\");",
"",
"\t\tif (quotePos == -1)",
"\t\t\treturn \"\\\"\" + name + \"\\\"\";",
"",
"\t\t//string does have quotes in it.",
"\t\twhile(quotePos != -1) {",
"\t\t\tquotedString = quotedString.substring(0,quotePos) + \"\\\"\" +",
"\t\t\t\tquotedString.substring(quotePos);",
"\t\t\tquotePos = quotedString.indexOf(\"\\\"\",quotePos+2);",
"\t\t}",
"\t\treturn \"\\\"\" + quotedString + \"\\\"\";",
"",
"\t}",
""
]
}
]
}
] |
derby-DERBY-4477-51826c3c
|
DERBY-4477 Selecting / projecting a column whose value is represented by a stream more than once fails
Follow-up patch for an intermittet bug caused by previous insert, due
to newly filed DERBY-4531: derby-4477-lowmem-followup.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@905621 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/testing/org/apache/derbyTesting/functionTests/util/streams/LoopingAlphabetReader.java",
"hunks": [
{
"added": [],
"header": "@@ -176,15 +176,6 @@ public class LoopingAlphabetReader",
"removed": [
" /**",
" * Reopen the stream.",
" */",
" public void reopen()",
" throws IOException {",
" this.closed = false;",
" reset();",
" }",
""
]
}
]
}
] |
derby-DERBY-4477-55bc97fc
|
DERBY-4477 Selecting / projecting a column whose value is represented by a stream more than once fails
Patch derby-4477-lowmem-2, which adds test cases to check that lobs
are not materialized when large, for the use cases covered by this
issue. The test cases are added to the lowmem suite, which is not not
part of the regular suites.All. This commit is preparatory in that the
lobs are still small, so these changes should be revisited to change
their sizes when the cloning handles materialization properly, cf
DERBY-3650 and DERBY-4520.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@904538 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/testing/org/apache/derbyTesting/functionTests/util/streams/LoopingAlphabetReader.java",
"hunks": [
{
"added": [
" /**",
" * Reopen the stream.",
" */",
" public void reopen()",
" throws IOException {",
" this.closed = false;",
" reset();",
" }",
""
],
"header": "@@ -176,6 +176,15 @@ public class LoopingAlphabetReader",
"removed": []
}
]
}
] |
derby-DERBY-4478-99c48bc0
|
DERBY-4478: Use AtomicLong for XactFactory.tranId
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1503157 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/store/raw/xact/XactFactory.java",
"hunks": [
{
"added": [
"import java.util.concurrent.atomic.AtomicLong;"
],
"header": "@@ -61,6 +61,7 @@ import org.apache.derby.iapi.error.StandardException;",
"removed": []
},
{
"added": [
" /** The id of the next transaction to be started. */",
" private final AtomicLong tranId = new AtomicLong();"
],
"header": "@@ -87,7 +88,8 @@ public class XactFactory implements TransactionFactory, ModuleControl, ModuleSup",
"removed": [
"\tprivate long\ttranId;"
]
},
{
"added": [
" XactId xid = new XactId(tranId.getAndIncrement());"
],
"header": "@@ -692,16 +694,12 @@ public class XactFactory implements TransactionFactory, ModuleControl, ModuleSup",
"removed": [
"\t\tXactId xid;",
"\t\tsynchronized(this)",
"\t\t{",
"\t\t\txid = new XactId(tranId++);",
"\t\t}"
]
},
{
"added": [
" long highestId = (xid == null) ? 0L : xid.getId();",
" tranId.set(highestId + 1);"
],
"header": "@@ -725,10 +723,8 @@ public class XactFactory implements TransactionFactory, ModuleControl, ModuleSup",
"removed": [
"\t\tif (xid != null)",
"\t\t\ttranId = xid.getId() + 1;",
"\t\telse",
"\t\t\ttranId = 1;"
]
}
]
}
] |
derby-DERBY-4479-f1f66ff2
|
DERBY-4479: RENAME TABLE needs to invalidate any cached CREATE TABLE statement
The problem involves a missing dependency between the CREATE TABLE statement
and the table that it is creating. For other types of statements, the
dependency of the statement on the table is generally registered during
compilation.
However, this isn't quite so easy with the CREATE TABLE statement, because
the table descriptor doesn't exist during compilation, since the table isn't
created until the statement is actually executed.
This change registers the dependency at execution time. At the very end of
CreateTableConstantAction.executeConstantAction, there is
some new code added to register a dependency from the CREATE TABLE
statement to the table we have just finished creating.
A new test is also added.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@909176 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4480-7d6c180f
|
DERBY-4480: "No suitable driver found" when attempting to connect while other thread is auto-loading the driver
Add test case to verify that it does not affect Java 7 and newer.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1442937 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4483-178ca0cf
|
DERBY-4483: Make toHexByte() private to discourage its use in new code
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@926520 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/jdbc/authentication/AuthenticationServiceBase.java",
"hunks": [
{
"added": [
" bytePasswd = toHexByte(plainTxtUserPassword);"
],
"header": "@@ -468,8 +468,7 @@ public abstract class AuthenticationServiceBase",
"removed": [
" bytePasswd = StringUtil.toHexByte(",
" plainTxtUserPassword,0,plainTxtUserPassword.length());"
]
},
{
"added": [
" /**",
" * <p>",
" * Convert a string into a byte array in hex format.",
" * </p>",
" *",
" * <p>",
" * For each character (b) two bytes are generated, the first byte",
" * represents the high nibble (4 bits) in hexadecimal ({@code b & 0xf0}),",
" * the second byte represents the low nibble ({@code b & 0x0f}).",
" * </p>",
" *",
" * <p>",
" * The character at {@code str.charAt(0)} is represented by the first two",
" * bytes in the returned String.",
" * </p>",
" *",
" * <p>",
" * New code is encouraged to use {@code String.getBytes(String)} or similar",
" * methods instead, since this method does not preserve all bits for",
" * characters whose codepoint exceeds 8 bits. This method is preserved for",
" * compatibility with the SHA-1 authentication scheme.",
" * </p>",
" *",
" * @param str string",
" * @return the byte[] (with hexadecimal format) form of the string (str)",
" */",
" private static byte[] toHexByte(String str)",
" {",
" byte[] data = new byte[str.length() * 2];",
"",
" for (int i = 0; i < str.length(); i++)",
" {",
" char ch = str.charAt(i);",
" int high_nibble = (ch & 0xf0) >>> 4;",
" int low_nibble = (ch & 0x0f);",
" data[i] = (byte)high_nibble;",
" data[i+1] = (byte)low_nibble;",
" }",
" return data;",
" }",
""
],
"header": "@@ -478,6 +477,47 @@ public abstract class AuthenticationServiceBase",
"removed": []
},
{
"added": [
" byte[] userBytes = toHexByte(userName);"
],
"header": "@@ -671,7 +711,7 @@ public abstract class AuthenticationServiceBase",
"removed": [
" byte[] userBytes = StringUtil.toHexByte(userName, 0, userName.length());"
]
},
{
"added": [
" bytePasswd = toHexByte(password);"
],
"header": "@@ -699,7 +739,7 @@ public abstract class AuthenticationServiceBase",
"removed": [
" bytePasswd = StringUtil.toHexByte(password, 0, password.length());"
]
},
{
"added": [
" messageDigest.update(toHexByte(hexString));"
],
"header": "@@ -722,8 +762,7 @@ public abstract class AuthenticationServiceBase",
"removed": [
" messageDigest.update(",
" StringUtil.toHexByte(hexString, 0, hexString.length()));"
]
}
]
}
] |
derby-DERBY-4483-1eee3053
|
DERBY-4602, DERBY-4483: Use SHA-1 for BUILTIN authentication if SHA-256 isn't supported
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@929715 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/catalog/DataDictionaryImpl.java",
"hunks": [
{
"added": [
"import java.security.MessageDigest;",
"import java.security.NoSuchAlgorithmException;"
],
"header": "@@ -158,6 +158,8 @@ import java.util.LinkedList;",
"removed": []
},
{
"added": [
" findDefaultBuiltinAlgorithm(),"
],
"header": "@@ -765,7 +767,7 @@ public final class\tDataDictionaryImpl",
"removed": [
" Property.AUTHENTICATION_BUILTIN_ALGORITHM_DEFAULT,"
]
}
]
}
] |
derby-DERBY-4483-23f97a59
|
DERBY-4483: Provide a way to change the hash algorithm used by BUILTIN authentication
Added more comments about the incompatibility between the configurable
hash scheme and strong password substitution.
Changed a symbol that still referred to the SHA-1 based authentication
scheme as the new scheme.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@924746 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/client/org/apache/derby/client/am/EncryptionManager.java",
"hunks": [
{
"added": [
" * This method generates a password substitute to send to the target"
],
"header": "@@ -525,7 +525,7 @@ public class EncryptionManager {",
"removed": [
" * This method generate a password subtitute to send to the target"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/jdbc/authentication/AuthenticationServiceBase.java",
"hunks": [
{
"added": [
" * This method generates a password substitute to authenticate a client"
],
"header": "@@ -594,7 +594,7 @@ public abstract class AuthenticationServiceBase",
"removed": [
" * This method generate a password subtitute to authenticate a client"
]
},
{
"added": [
" * The substitution algorithm used is the same as the one used in the",
" * SHA-1 authentication scheme ({@link #ID_PATTERN_SHA1_SCHEME}), so in",
" * the case of database passwords stored using that scheme, we can simply",
" * compare the received hash with the stored hash. If the configurable",
" * hash authentication scheme {@link #ID_PATTERN_CONFIGURABLE_HASH_SCHEME}",
" * is used, we have no way to find out if the received hash matches the",
" * stored password, since we cannot decrypt the hashed passwords and",
" * re-apply another hash algorithm. Therefore, strong password substitution",
" * only works if the database-level passwords are stored with the SHA-1",
" * scheme.",
" *"
],
"header": "@@ -605,6 +605,17 @@ public abstract class AuthenticationServiceBase",
"removed": []
},
{
"added": [],
"header": "@@ -633,9 +644,6 @@ public abstract class AuthenticationServiceBase",
"removed": [
" // Pattern that is prefixed to the BUILTIN encrypted password",
" String ID_PATTERN_NEW_SCHEME = \"3b60\";",
""
]
},
{
"added": [
" hexString = ID_PATTERN_SHA1_SCHEME +",
" {",
" // NOTE: If the password was stored with the configurable hash",
" // authentication scheme, the stored password will have been hashed",
" // with a different algorithm than the hashed password sent from",
" // the client. Since there's no way to decrypt the stored password",
" // and rehash it with the algorithm that the client uses, we are",
" // not able to compare the passwords, and the connection attempt",
" // will fail.",
" }"
],
"header": "@@ -694,12 +702,21 @@ public abstract class AuthenticationServiceBase",
"removed": [
" hexString = ID_PATTERN_NEW_SCHEME +"
]
}
]
}
] |
derby-DERBY-4483-60edeb0c
|
DERBY-4483: Provide a way to change the hash algorithm used by BUILTIN authentication
Added a database property, derby.authentication.builtin.algorithm,
that specifies which message digest algorithm to use when storing user
credentials in the database.
Added functional tests and upgrade tests.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@922304 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/jdbc/authentication/AuthenticationServiceBase.java",
"hunks": [
{
"added": [],
"header": "@@ -28,7 +28,6 @@ import org.apache.derby.iapi.jdbc.AuthenticationService;",
"removed": [
"import org.apache.derby.iapi.services.i18n.MessageService;"
]
},
{
"added": [
"import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;",
"import org.apache.derby.iapi.sql.dictionary.DataDictionary;",
"",
"import java.io.UnsupportedEncodingException;",
"import org.apache.derby.iapi.reference.SQLState;",
" * <p>",
" * </p>",
"",
" * </p>",
" *",
" * </p>",
" * <p><strong>IMPORTANT NOTE:</strong></p>",
" *",
" * <p>",
" * User passwords are encrypted using a message digest algorithm",
" * </p>",
" *",
" * <p>",
" * The passwords can be encrypted using two different schemes:",
" * </p>",
" *",
" * <ul>",
" * <li>The SHA-1 authentication scheme, which was the only available scheme",
" * in Derby 10.5 and earlier. This scheme uses the SHA-1 message digest",
" * algorithm.</li>",
" * <li>The configurable hash authentication scheme, which allows the users to",
" * specify which message digest algorithm to use.</li>",
" * </ul>",
" *",
" * <p>",
" * In order to use the configurable hash authentication scheme, the users have",
" * to set the {@code derby.authentication.builtin.algorithm} property (on",
" * system level or database level) to the name of an algorithm that's available",
" * in one of the security providers registered on the system. If this property",
" * is not set, or if it's set to NULL or an empty string, the SHA-1",
" * authentication scheme is used.",
" * </p>",
" * <p>",
" * Which scheme to use is decided when a password is about to be stored in the",
" * database. One database may therefore contain passwords stored using",
" * different schemes. In order to determine which scheme to use when comparing",
" * a user's credentials with those stored in the database, the stored password",
" * is prefixed with an identifier that tells which scheme is being used.",
" * Passwords stored using the SHA-1 authentication scheme are prefixed with",
" * {@link #ID_PATTERN_SHA1_SCHEME}. Passwords that are stored using the",
" * configurable hash authentication scheme are prefixed with",
" * {@link #ID_PATTERN_CONFIGURABLE_HASH_SCHEME} and suffixed with the name of",
" * the message digest algorithm.",
" * </p>"
],
"header": "@@ -48,32 +47,75 @@ import org.apache.derby.iapi.reference.Attribute;",
"removed": [
"import java.util.Date;",
" * IMPORTANT NOTE:",
" * --------------",
" * User passwords are encrypted using SHA-1 message digest algorithm",
" * SHA-1 digest is single hash (one way) digest and is considered very",
" * secure (160 bits)."
]
},
{
"added": [
"",
" /**",
" * Pattern that is prefixed to the stored password in the SHA-1",
" * authentication scheme.",
" */",
" public static final String ID_PATTERN_SHA1_SCHEME = \"3b60\";",
"",
" /**",
" * Pattern that is prefixed to the stored password in the configurable",
" * hash authentication scheme.",
" */",
" public static final String ID_PATTERN_CONFIGURABLE_HASH_SCHEME = \"3b61\";",
" /**",
" * The encoding to use when converting the credentials to a byte array",
" * that can be passed to the hash function in the configurable hash scheme.",
" */",
" private static final String ENCODING = \"UTF-8\";",
"",
" /**",
" * Character that separates the hash value from the name of the hash",
" * algorithm in the stored password generated by the configurable hash",
" * authentication scheme.",
" */",
" static final char SEPARATOR_CHAR = ':';"
],
"header": "@@ -88,21 +130,36 @@ public abstract class AuthenticationServiceBase",
"removed": [
"\t/**",
"\t\tPattern that is prefixed to the stored password in the new authentication scheme",
"\t*/",
"\tpublic static final String ID_PATTERN_NEW_SCHEME = \"3b60\";",
"\t/**",
"\t\tLength of the encrypted password in the new authentication scheme",
"\t\tSee Beetle4601",
"\t*/",
"\tpublic static final int MAGICLEN_NEWENCRYPT_SCHEME=44;"
]
},
{
"added": [
" String userName =",
" key.substring(Property.USER_PROPERTY_PREFIX.length());",
" userPassword =",
" encryptUsingDefaultAlgorithm(userName, userPassword, p);"
],
"header": "@@ -349,7 +406,10 @@ public abstract class AuthenticationServiceBase",
"removed": [
"\t\t\tuserPassword = encryptPassword(userPassword);"
]
},
{
"added": [
" * <p>",
" * </p>",
" * <p>",
" * </p>",
" *",
" * <p>",
" * This method is only used by the SHA-1 authentication scheme.",
" * </p>",
" * or {@code null} if the plaintext password is {@code null}",
"\tprotected String encryptPasswordSHA1Scheme(String plainTxtUserPassword)"
],
"header": "@@ -373,17 +433,26 @@ public abstract class AuthenticationServiceBase",
"removed": [
"\tprotected String encryptPassword(String plainTxtUserPassword)"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/jdbc/authentication/BasicAuthenticationServiceImpl.java",
"hunks": [
{
"added": [
"import org.apache.derby.impl.jdbc.Util;",
"import java.sql.SQLException;"
],
"header": "@@ -21,26 +21,20 @@",
"removed": [
"import org.apache.derby.iapi.reference.MessageId;",
"import org.apache.derby.iapi.services.daemon.Serviceable;",
"import org.apache.derby.iapi.services.monitor.ModuleFactory;",
"import org.apache.derby.iapi.services.i18n.MessageService;",
"import org.apache.derby.iapi.store.access.TransactionController;",
"import org.apache.derby.iapi.jdbc.AuthenticationService;",
"import java.io.Serializable;",
"import java.util.Dictionary;"
]
},
{
"added": [
" throws SQLException"
],
"header": "@@ -147,6 +141,7 @@ public final class BasicAuthenticationServiceImpl",
"removed": []
},
{
"added": [
" try {",
" passedUserPassword = encryptPasswordUsingStoredAlgorithm(",
" userName, userPassword, definedUserPassword);",
" } catch (StandardException se) {",
" // The UserAuthenticator interface does not allow us to",
" // throw a StandardException, so convert to SQLException.",
" throw Util.generateCsSQLException(se);",
" }"
],
"header": "@@ -199,7 +194,14 @@ public final class BasicAuthenticationServiceImpl",
"removed": [
" passedUserPassword = encryptPassword(userPassword);"
]
}
]
}
] |
derby-DERBY-4483-8c305e2f
|
DERBY-4483: Provide a way to change the hash algorithm used by BUILTIN authentication
Make fresh databases use the new authentication scheme with SHA-256 by default.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@927368 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4488-300bbebe
|
DERBY-4488: Nullpointer when performing INSERT INTO
Moved setting of the statement context's topResultSet to
NoRowsResultSetImpl.setup() to ensure that it's performed early enough
for all sub-classes (and for InsertResultSet in particular).
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@903108 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/execute/DeleteResultSet.java",
"hunks": [
{
"added": [],
"header": "@@ -200,9 +200,6 @@ class DeleteResultSet extends DMLWriteResultSet",
"removed": [
"\t\t * NOTE: We need to set ourself as the top result set",
"\t\t * if this is not the 1st execution. (Done in constructor",
"\t\t * for 1st execution.)"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java",
"hunks": [
{
"added": [],
"header": "@@ -885,9 +885,6 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": [
"\t\t * NOTE: We need to set ourself as the top result set",
"\t\t * if this is not the 1st execution. (Done in constructor",
"\t\t * for 1st execution.)"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/NoRowsResultSetImpl.java",
"hunks": [
{
"added": [
" private NoPutResultSet[] subqueryTrackingArray;"
],
"header": "@@ -62,7 +62,7 @@ import org.apache.derby.iapi.types.DataValueDescriptor;",
"removed": [
"\tNoPutResultSet[]\tsubqueryTrackingArray;"
]
},
{
"added": [],
"header": "@@ -84,7 +84,6 @@ abstract class NoRowsResultSetImpl implements ResultSet",
"removed": [
"\t\tthrows StandardException"
]
},
{
"added": [],
"header": "@@ -102,12 +101,6 @@ abstract class NoRowsResultSetImpl implements ResultSet",
"removed": [
"",
"\t\tStatementContext sc = lcc.getStatementContext();",
"\t\tsc.setTopResultSet(this, (NoPutResultSet[]) null);",
"",
"\t\t// Pick up any materialized subqueries",
"\t\tsubqueryTrackingArray = sc.getSubqueryTrackingArray();"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/UpdateResultSet.java",
"hunks": [
{
"added": [],
"header": "@@ -301,9 +301,6 @@ class UpdateResultSet extends DMLWriteResultSet",
"removed": [
"\t\t * NOTE: We need to set ourself as the top result set",
"\t\t * if this is not the 1st execution. (Done in constructor",
"\t\t * for 1st execution.)"
]
}
]
}
] |
derby-DERBY-4491-28e42550
|
DERBY-4491: Disable test of parameter metadata on small platforms since PreparedStatement.getParameterMetaData() is not available on JSR-169 vms.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@901277 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4491-2aa32e8a
|
DERBY-4491: Fix sealing violation which kills the AssertFailureTest on runs against insane jar files.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@899819 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/client/org/apache/derby/client/net/DynamicByteArrayOutputStream.java",
"hunks": [
{
"added": [
" Derby - Class org.apache.derby.client.net.DynamicByteArrayOutputStream"
],
"header": "@@ -1,6 +1,6 @@",
"removed": [
" Derby - Class org.apache.derby.shared.common.io.DynamicByteArrayOutputStream"
]
},
{
"added": [
"package org.apache.derby.client.net;"
],
"header": "@@ -19,9 +19,7 @@",
"removed": [
"package org.apache.derby.shared.common.io;",
"",
"import org.apache.derby.shared.common.sanity.SanityManager;"
]
}
]
},
{
"file": "java/client/org/apache/derby/client/net/InputStreamUtil.java",
"hunks": [
{
"added": [
" Derby - Class org.apache.derby.client.net.InputStreamUtil"
],
"header": "@@ -1,6 +1,6 @@",
"removed": [
" Derby - Class org.apache.derby.shared.common.io.InputStreamUtil"
]
},
{
"added": [
"package org.apache.derby.client.net;"
],
"header": "@@ -19,7 +19,7 @@",
"removed": [
"package org.apache.derby.shared.common.io;"
]
}
]
},
{
"file": "java/client/org/apache/derby/client/net/Request.java",
"hunks": [
{
"added": [],
"header": "@@ -23,9 +23,7 @@ package org.apache.derby.client.net;",
"removed": [
"import org.apache.derby.shared.common.io.DynamicByteArrayOutputStream;",
"import org.apache.derby.shared.common.sanity.SanityManager;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/services/io/InputStreamUtil.java",
"hunks": [
{
"added": [
"public final class InputStreamUtil {",
" private static final int SKIP_FRAGMENT_SIZE = Integer.MAX_VALUE;",
"",
"\t/**",
"\t\tRead an unsigned byte from an InputStream, throwing an EOFException",
"\t\tif the end of the input is reached.",
"",
"\t\t@exception IOException if an I/O error occurs.",
"\t\t@exception EOFException if the end of the stream is reached",
"",
"\t\t@see DataInput#readUnsignedByte",
"\t",
"\t*/",
"\tpublic static int readUnsignedByte(InputStream in) throws IOException {",
"\t\tint b = in.read();",
"\t\tif (b < 0)",
"\t\t\tthrow new EOFException();",
"",
"\t\treturn b;",
"\t}",
"",
"\t/**",
"\t\tRead a number of bytes into an array.",
"",
"\t\t@exception IOException if an I/O error occurs.",
"\t\t@exception EOFException if the end of the stream is reached",
"",
"\t\t@see DataInput#readFully",
"",
"\t*/",
"\tpublic static void readFully(InputStream in, byte b[],",
" int offset,",
" int len) throws IOException",
"\t{",
"\t\tdo {",
"\t\t\tint bytesRead = in.read(b, offset, len);",
"\t\t\tif (bytesRead < 0)",
"\t\t\t\tthrow new EOFException();",
"\t\t\tlen -= bytesRead;",
"\t\t\toffset += bytesRead;",
"\t\t} while (len != 0);",
"\t}",
"",
"",
"\t/**",
"\t\tRead a number of bytes into an array.",
" Keep reading in a loop until len bytes are read or EOF is reached or",
" an exception is thrown. Return the number of bytes read.",
" (InputStream.read(byte[],int,int) does not guarantee to read len bytes",
" even if it can do so without reaching EOF or raising an exception.)",
"",
"\t\t@exception IOException if an I/O error occurs.",
"\t*/",
"\tpublic static int readLoop(InputStream in,",
" byte b[],",
" int offset,",
" int len)",
" throws IOException",
"\t{",
" int firstOffset = offset;",
"\t\tdo {",
"\t\t\tint bytesRead = in.read(b, offset, len);",
"\t\t\tif (bytesRead <= 0)",
" break;",
"\t\t\tlen -= bytesRead;",
"\t\t\toffset += bytesRead;",
"\t\t} while (len != 0);",
" return offset - firstOffset;",
"\t}",
"",
" /**",
" * Skips until EOF, returns number of bytes skipped.",
" * @param is",
" * InputStream to be skipped.",
" * @return",
" * number of bytes skipped in fact.",
" * @throws IOException",
" * if IOException occurs. It doesn't contain EOFException.",
" * @throws NullPointerException",
" * if the param 'is' equals null.",
" */",
" public static long skipUntilEOF(InputStream is) throws IOException {",
" if(is == null)",
" throw new NullPointerException();",
"",
" long bytes = 0;",
" while(true){",
" long r = skipPersistent(is, SKIP_FRAGMENT_SIZE);",
" bytes += r;",
" if(r < SKIP_FRAGMENT_SIZE)",
" return bytes;",
" }",
" }",
"",
" /**",
" * Skips requested number of bytes,",
" * throws EOFException if there is too few bytes in the stream.",
" * @param is",
" * InputStream to be skipped.",
" * @param skippedBytes",
" * number of bytes to skip. if skippedBytes <= zero, do nothing.",
" * @throws EOFException",
" * if EOF meets before requested number of bytes are skipped.",
" * @throws IOException",
" * if IOException occurs. It doesn't contain EOFException.",
" * @throws NullPointerException",
" * if the param 'is' equals null.",
" */",
" public static void skipFully(InputStream is, long skippedBytes)",
" throws IOException {",
" if(is == null)",
" throw new NullPointerException();",
"",
" if(skippedBytes <= 0)",
" return;",
"",
" long bytes = skipPersistent(is, skippedBytes);",
"",
" if(bytes < skippedBytes)",
" throw new EOFException();",
" }",
"",
" /**",
" * Tries harder to skip the requested number of bytes.",
" * <p>",
" * Note that even if the method fails to skip the requested number of bytes,",
" * it will not throw an exception. If this happens, the caller can be sure",
" * that end-of-stream has been reached.",
" *",
" * @param in byte stream",
" * @param bytesToSkip the number of bytes to skip",
" * @return The number of bytes skipped.",
" * @throws IOException if reading from the stream fails",
" */",
" public static final long skipPersistent(InputStream in, long bytesToSkip)",
" throws IOException {",
" long skipped = 0;",
" while (skipped < bytesToSkip) {",
" long skippedNow = in.skip(bytesToSkip - skipped);",
" if (skippedNow == 0) {",
" if (in.read() == -1) {",
" // EOF, return what we have and leave it up to caller to",
" // decide what to do about it.",
" break;",
" } else {",
" skippedNow = 1; // Added to count below.",
" }",
" }",
" skipped += skippedNow;",
" }",
" return skipped;",
" }",
"}"
],
"header": "@@ -28,5 +28,156 @@ import java.io.*;",
"removed": [
"public final class InputStreamUtil extends org.apache.derby.shared.common.io.InputStreamUtil",
"{}"
]
}
]
}
] |
derby-DERBY-4496-685cec96
|
DERBY-4496 Column list size mismatch with ORDER BY in INSERT statement
Fixed this issue which was due to missed occurrences of RCL.size() that
now (with ORDER BY in subqueries) need to be generalized to
RCL.visibleSize to be able to ignore synthesized columns. Added a new
test case for the failing repro. Also wired the OrderByInSubqueries
test into the regression suite.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@896388 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ResultColumnList.java",
"hunks": [
{
"added": [
" if ( (! countMismatchAllowed) &&",
" visibleSize() != nameList.visibleSize() )",
" \", nameList.visibleSize() = \" + nameList.visibleSize());",
" int size =",
" (countMismatchAllowed) ? nameList.visibleSize() : visibleSize();",
""
],
"header": "@@ -646,16 +646,19 @@ public class ResultColumnList extends QueryTreeNodeVector",
"removed": [
"\t\t\t if ((! countMismatchAllowed) && visibleSize() != nameList.size())",
"\t\t\t\t\t\", nameList.size() = \" + nameList.size());",
"\t\tint size = (countMismatchAllowed) ? nameList.size() : visibleSize();"
]
}
]
}
] |
derby-DERBY-4499-33bfdc00
|
DERBY-4499: Fix bug which prevented us from using UDTs as output args in database procedures.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@909415 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/StaticMethodCallNode.java",
"hunks": [
{
"added": [
"\t\t\t\t\t\tboolean isAnsiUDT = paramdtd.getTypeId().getBaseTypeId().isAnsiUDT();"
],
"header": "@@ -1162,6 +1162,7 @@ public class StaticMethodCallNode extends MethodCallNode",
"removed": []
}
]
}
] |
derby-DERBY-4502-58806b97
|
DERBY-4502: Make it possible to declare views against system tables
when authorization is enabled.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@896146 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/SchemaDescriptor.java",
"hunks": [
{
"added": [
"\tprivate String\t\t\taid;"
],
"header": "@@ -155,7 +155,7 @@ public final class SchemaDescriptor extends TupleDescriptor",
"removed": [
"\tprivate final String\t\t\taid;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/catalog/PermissionsCacheable.java",
"hunks": [
{
"added": [
" {"
],
"header": "@@ -75,6 +75,7 @@ class PermissionsCacheable implements Cacheable",
"removed": []
}
]
}
] |
derby-DERBY-4512-9b450ec3
|
DERBY-4512: Avoid unnecessary lookup in transaction table when adding transaction
The transaction table should not contain a transaction with the same
id as the one that's added with TransactionTable.add(). Therefore, put
the new transaction in the table unconditionally instead of checking
first whether there is an existing entry to reuse. This saves one
lookup in the transaction table per transaction.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@900714 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/store/raw/xact/TransactionTable.java",
"hunks": [
{
"added": [
" TransactionTableEntry newEntry = new TransactionTableEntry(",
" xact, id, 0, exclude ? TransactionTableEntry.EXCLUDE : 0);",
"",
" Object oldEntry = trans.put(id, newEntry);",
" if (SanityManager.DEBUG)",
" {",
" SanityManager.ASSERT(",
" oldEntry == null,",
" \"Trying to add a transaction that's already \" +",
" \"in the transaction table\");",
" if (SanityManager.DEBUG_ON(\"TranTrace\"))",
" SanityManager.DEBUG(",
" \"TranTrace\", \"adding transaction \" + id);",
" SanityManager.showTrace(new Throwable(\"TranTrace\"));",
" }"
],
"header": "@@ -125,36 +125,27 @@ public class TransactionTable implements Formatable",
"removed": [
"\t\t\tTransactionTableEntry ent = findTransactionEntry(id);",
"",
"\t\t\tif (ent == null)",
"\t\t\t{",
"\t\t\t\tent = new TransactionTableEntry",
"\t\t\t\t\t(xact, id, 0, ",
"\t\t\t\t\t exclude ? TransactionTableEntry.EXCLUDE : 0);",
"\t\t\t\ttrans.put(id, ent);",
"\t\t\t\tif (SanityManager.DEBUG)",
" if (SanityManager.DEBUG_ON(\"TranTrace\"))",
" {",
" SanityManager.DEBUG(",
" \"TranTrace\", \"adding transaction \" + id);",
" SanityManager.showTrace(new Throwable(\"TranTrace\"));",
" }",
"\t\t\t}",
"",
"\t\t\tif (SanityManager.DEBUG)",
"\t\t\t{",
"\t\t\t\tif (exclude != ent.needExclusion())",
"\t\t\t\t\tSanityManager.THROWASSERT(",
"\t\t\t\t\t \"adding the same transaction with different exclusion: \" +",
"\t\t\t\t\t exclude + \" \" + ent.needExclusion());",
"\t\t\t}"
]
}
]
}
] |
derby-DERBY-4513-e671fc78
|
DERBY-4513: Prevent NEXT VALUE FOR expressions from being used in various contexts.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@908627 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/sql/compile/CompilerContext.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.dictionary.SequenceDescriptor;"
],
"header": "@@ -33,6 +33,7 @@ import org.apache.derby.iapi.sql.ParameterValueSet;",
"removed": []
},
{
"added": [
"\tpublic static final int\t\t\tNEXT_VALUE_FOR_ILLEGAL\t\t=\t0x00004000;",
""
],
"header": "@@ -103,6 +104,8 @@ public interface CompilerContext extends Context",
"removed": []
},
{
"added": [
"\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tINTERNAL_SQL_ILLEGAL |",
" NEXT_VALUE_FOR_ILLEGAL"
],
"header": "@@ -116,7 +119,8 @@ public interface CompilerContext extends Context",
"removed": [
"\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tINTERNAL_SQL_ILLEGAL"
]
},
{
"added": [
" SQL_IN_ROUTINES_ILLEGAL |",
" NEXT_VALUE_FOR_ILLEGAL",
"\tpublic\tstatic\tfinal\tint\t\t\tWHERE_CLAUSE_RESTRICTION\t\t= NEXT_VALUE_FOR_ILLEGAL;",
"\tpublic\tstatic\tfinal\tint\t\t\tHAVING_CLAUSE_RESTRICTION\t\t= NEXT_VALUE_FOR_ILLEGAL;",
"\tpublic\tstatic\tfinal\tint\t\t\tON_CLAUSE_RESTRICTION\t\t= NEXT_VALUE_FOR_ILLEGAL;",
"\tpublic\tstatic\tfinal\tint\t\t\tAGGREGATE_RESTRICTION\t\t= NEXT_VALUE_FOR_ILLEGAL;",
"\tpublic\tstatic\tfinal\tint\t\t\tCONDITIONAL_RESTRICTION\t\t= NEXT_VALUE_FOR_ILLEGAL;",
"\tpublic\tstatic\tfinal\tint\t\t\tGROUP_BY_RESTRICTION\t\t= NEXT_VALUE_FOR_ILLEGAL;"
],
"header": "@@ -129,9 +133,16 @@ public interface CompilerContext extends Context",
"removed": [
" SQL_IN_ROUTINES_ILLEGAL"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/CompilerContextImpl.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.dictionary.SequenceDescriptor;"
],
"header": "@@ -36,6 +36,7 @@ import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;",
"removed": []
},
{
"added": [
" referencedSequences = null;"
],
"header": "@@ -162,6 +163,7 @@ public class CompilerContextImpl extends ContextImpl",
"removed": []
},
{
"added": [
"\tpublic void addReferencedSequence( SequenceDescriptor sd )",
" {",
" if ( referencedSequences == null ) { referencedSequences = new HashMap(); }",
"",
" referencedSequences.put( sd.getUUID(), sd );",
" }",
"",
"\t/**",
"\t * Report whether the given sequence has been referenced already.",
"\t */",
" public boolean isReferenced( SequenceDescriptor sd )",
" {",
" if ( referencedSequences == null ) { return false; }",
"",
" return referencedSequences.containsKey( sd.getUUID() );",
" }",
""
],
"header": "@@ -994,6 +996,23 @@ public class CompilerContextImpl extends ContextImpl",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/ConditionalNode.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.compile.CompilerContext;"
],
"header": "@@ -29,6 +29,7 @@ import org.apache.derby.iapi.services.sanity.SanityManager;",
"removed": []
},
{
"added": [
" CompilerContext cc = getCompilerContext();",
" ",
" int previousReliability = orReliability( CompilerContext.CONDITIONAL_RESTRICTION );",
" "
],
"header": "@@ -379,6 +380,10 @@ public class ConditionalNode extends ValueNode",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/GroupByColumn.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.compile.CompilerContext;"
],
"header": "@@ -24,6 +24,7 @@ package\torg.apache.derby.impl.sql.compile;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/JoinNode.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.compile.CompilerContext;"
],
"header": "@@ -29,6 +29,7 @@ import org.apache.derby.iapi.services.sanity.SanityManager;",
"removed": []
},
{
"added": [
" CompilerContext cc = getCompilerContext();",
" "
],
"header": "@@ -806,6 +807,8 @@ public class JoinNode extends TableOperatorNode",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/NextSequenceNode.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.compile.CompilerContext;"
],
"header": "@@ -26,6 +26,7 @@ import org.apache.derby.iapi.reference.ClassName;",
"removed": []
},
{
"added": [
" throws StandardException",
" {",
" CompilerContext cc = getCompilerContext();",
" ",
" if ( (cc.getReliability() & CompilerContext.NEXT_VALUE_FOR_ILLEGAL) != 0 )",
" {",
" throw StandardException.newException( SQLState.LANG_NEXT_VALUE_FOR_ILLEGAL );",
" }"
],
"header": "@@ -66,7 +67,14 @@ public class NextSequenceNode extends ValueNode {",
"removed": [
" throws StandardException {"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/SelectNode.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.compile.CompilerContext;"
],
"header": "@@ -30,6 +30,7 @@ import org.apache.derby.iapi.sql.compile.C_NodeTypes;",
"removed": []
},
{
"added": [
" ",
" CompilerContext cc = getCompilerContext();",
" ",
"\t\t\tcc.pushCurrentPrivType( Authorizer.SELECT_PRIV);",
"",
" int previousReliability = orReliability( CompilerContext.WHERE_CLAUSE_RESTRICTION );",
" cc.setReliability( previousReliability );"
],
"header": "@@ -573,12 +574,18 @@ public class SelectNode extends ResultSetNode",
"removed": [
"\t\t\tgetCompilerContext().pushCurrentPrivType( Authorizer.SELECT_PRIV);"
]
},
{
"added": [
"\t\tif (havingClause != null)",
" {",
" int previousReliability = orReliability( CompilerContext.HAVING_CLAUSE_RESTRICTION );",
""
],
"header": "@@ -605,7 +612,10 @@ public class SelectNode extends ResultSetNode",
"removed": [
"\t\tif (havingClause != null) {"
]
}
]
}
] |
derby-DERBY-4514-4deb9812
|
DERBY-4514 - j2ME test failures relating to RuntimeStatisticsParser;
now using org.apache.derbyTesting.junit.Utilities.split for JSR169
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@898638 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/testing/org/apache/derbyTesting/junit/RuntimeStatisticsParser.java",
"hunks": [
{
"added": [
" if (JDBC.vmSupportsJSR169())",
" {",
" // do something else then split.",
" String [] startPositionLines = Utilities.split(positionLines, '\\n');",
" return startPositionLines;",
" }",
" else",
" {",
" String [] startPositionLines = positionLines.split(\"\\n\");",
" return startPositionLines;",
" }",
" }"
],
"header": "@@ -417,8 +417,18 @@ public class RuntimeStatisticsParser {",
"removed": [
" String [] startPositionLines = positionLines.split(\"\\n\");",
" return startPositionLines;}"
]
},
{
"added": [
" if (JDBC.vmSupportsJSR169())",
" {",
" // do something else then split.",
" String [] stopPositionLines = Utilities.split(positionLines, '\\n');",
" return stopPositionLines;",
" }",
" else",
" {",
" String [] stopPositionLines = positionLines.split(\"\\n\");",
" return stopPositionLines;",
" }",
" }"
],
"header": "@@ -435,8 +445,18 @@ public class RuntimeStatisticsParser {",
"removed": [
" String [] startPositionLines = positionLines.split(\"\\n\");",
" return startPositionLines;}"
]
}
]
}
] |
derby-DERBY-4515-d4953ddd
|
DERBY-4515: Document and clarify the use of DataValueDescriptor.setValue(InputStream,int)
Clarified and improved documentation for the setValue method.
Added a constant to represent unknown length.
Rewrote code in EmbedPreparedStatement to remove an unnecessary variable.
Patch file: derby-4515-1a-setValue_stream_clarification.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@901648 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/types/DataValueDescriptor.java",
"hunks": [
{
"added": [
" /**",
" * Constant indicating that the logical length of a value (i.e. chars for",
" * string types or bytes for binary types) is unknown.",
" */",
" int UNKNOWN_LOGICAL_LENGTH = -1;",
""
],
"header": "@@ -95,6 +95,12 @@ import java.util.Calendar;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/jdbc/EmbedPreparedStatement.java",
"hunks": [
{
"added": [
" // Holds either UNKNOWN_LOGICAL_LENGTH or the exact logical length.",
" int usableLength = DataValueDescriptor.UNKNOWN_LOGICAL_LENGTH;",
" usableLength = (int)length;"
],
"header": "@@ -748,19 +748,16 @@ public abstract class EmbedPreparedStatement",
"removed": [
" // Default to max column width. This will be used to limit the",
" // amount of data read when operating on \"lengthless\" streams.",
" int usableLength = colWidth;",
" int intLength = (int)length;",
" usableLength = intLength;",
""
]
},
{
"added": [
" if (usableLength > colWidth) {",
" truncationLength = usableLength - colWidth;"
],
"header": "@@ -781,11 +778,9 @@ public abstract class EmbedPreparedStatement",
"removed": [
" // we have used intLength into which the length variable had",
" // been cast to an int and stored",
" if (intLength > colWidth) {",
" truncationLength = intLength - usableLength;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/jdbc/EmbedResultSet.java",
"hunks": [
{
"added": [
" // Force length to UNKNOWN_LOGICAL_LENGTH if stream is length less.",
" length = DataValueDescriptor.UNKNOWN_LOGICAL_LENGTH;"
],
"header": "@@ -2828,8 +2828,8 @@ public abstract class EmbedResultSet extends ConnectionChild",
"removed": [
" // Force length to -1 if stream is length less.",
" length = -1;"
]
},
{
"added": [
" int usableLength = DataValueDescriptor.UNKNOWN_LOGICAL_LENGTH;"
],
"header": "@@ -2936,7 +2936,7 @@ public abstract class EmbedResultSet extends ConnectionChild",
"removed": [
" int usableLength = -1;"
]
},
{
"added": [],
"header": "@@ -2993,8 +2993,6 @@ public abstract class EmbedResultSet extends ConnectionChild",
"removed": [
" // NOTE: The length argument to setValue is not used. If that",
" // changes, the value might also have to change."
]
}
]
}
] |
derby-DERBY-4519-1d0c179d
|
DERBY-4519: Infinite loop in StreamFileContainer.writeColumn
Fixed two issues;
- swapped offset and length arguments (removed them for the read-case)
- bounded the transfer buffer size, because InputStream.available() can
return both zero and a very high value. Size bounded by [64, 8192].
Patch file: derby-4519-2a-infinite_loop_fixes.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@901165 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/store/raw/data/StreamFileContainer.java",
"hunks": [
{
"added": [
" // Set a reasonable buffer size.",
" // To avoid extremely inefficient reads, and an infinite loop when",
" // InputStream.available() returns zero, a lower limit is set on",
" // the buffer size. To avoid using too much memory (especially in",
" // multi-user environments) an upper limit is set as well.",
" // The limits can be tuned, but note that using a too high default",
" // or lower limit can put unnecessary pressure on the memory sub-",
" // system and the GC process.",
" int bufferLen = Math.min(Math.max(inColumn.available(), 64), 8192);",
" int lenRead = inColumn.read(bufData);",
" out.write(bufData, 0, lenRead);"
],
"header": "@@ -731,16 +731,24 @@ public class StreamFileContainer implements TypedFormat, PrivilegedExceptionActi",
"removed": [
"\t\t\tint bufferLen = inColumn.available();",
"\t\t\t\tint lenRead = inColumn.read(bufData, bufferLen, 0);",
"\t\t\t\t\tout.write(bufData, lenRead, 0);"
]
}
]
}
] |
derby-DERBY-4520-04acfa56
|
DERBY-4520: Refactor and extend data type cloning facilities
Changed the cloneValue methods for BLOB and CLOB.
Noteworthy information:
- made BasicSortObserver force materialization when cloning
(required because it closes the underlying source result set).
- java.sql.[BC]lob values are assumed to be "clone-safe"
(i.e. more than one SQL[BC]lob object can reference them).
- in some cases invoking cloneValue(false) will result in a stream being
materialized, because we don't know how to clone the stream otherwise.
A better future solution might be to use temporary disk storage instead.
- there's a TODO for implementing an optimization for small streams, where
Derby could choose to materialize the streams if the length is known.
This is only important for a few use-cases, but it avoids decoding the
stream data more than once.
Patch file: derby-4520-7b-lob_clonevalue_methods.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@907732 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/types/SQLBlob.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.services.io.CloneableStream;",
"import org.apache.derby.shared.common.sanity.SanityManager;",
""
],
"header": "@@ -24,8 +24,11 @@ package org.apache.derby.iapi.types;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLClob.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.services.io.CloneableStream;"
],
"header": "@@ -26,6 +26,7 @@ import org.apache.derby.iapi.error.StandardException;",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/BasicSortObserver.java",
"hunks": [
{
"added": [
" // here (i.e. used getClone, not cloneObject). We still",
" // do, as the sorter closes the underlying source result",
" // set very early, which causes store streams to fail",
" // because the container handle is closed.",
" newArray[i] = origArray[i].cloneValue(true);"
],
"header": "@@ -162,11 +162,12 @@ public class BasicSortObserver implements SortObserver",
"removed": [
" // here (i.e. used cloneValue, not cloneObject). We still",
" // do.",
" // TODO: change to 'true' below and add comment.",
"\t\t\tnewArray[i] = origArray[i].cloneValue(false);"
]
}
]
}
] |
derby-DERBY-4520-1b454a1f
|
DERBY-4520 (partial): Refactor and extend data type cloning facilities
Added functionality to clone store streams (without materialization).
Delayed filling the byte buffer in OverflowInputStream constructor and in
OverflowInputStream.resetStream.
Original patch contributed by Mike Matrigali (mikem_app at sbcglobal dot net) as
part of DERBY-3650, modified by Kristian Waagan (Kristian dot Waagan at Sun dot com).
Patch file: derby-4520-3b-CloneableStream_and_delayed_fill.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@902742 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/services/io/FormatIdInputStream.java",
"hunks": [
{
"added": [],
"header": "@@ -26,7 +26,6 @@ import java.io.IOException;",
"removed": [
"import org.apache.derby.iapi.reference.SQLState;"
]
},
{
"added": [
" implements ErrorObjectInput, Resetable, CloneableStream"
],
"header": "@@ -43,7 +42,7 @@ import org.apache.derby.iapi.services.context.ContextService;",
"removed": [
"\t implements ErrorObjectInput, Resetable"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/store/raw/data/ByteHolder.java",
"hunks": [
{
"added": [],
"header": "@@ -21,12 +21,9 @@",
"removed": [
"import org.apache.derby.iapi.services.sanity.SanityManager;",
"",
"import java.util.Vector;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/store/raw/data/OverflowInputStream.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.services.io.CloneableStream;",
"",
"",
"import java.io.InputStream;"
],
"header": "@@ -24,12 +24,16 @@ package org.apache.derby.impl.store.raw.data;",
"removed": []
},
{
"added": [
"implements Resetable, CloneableStream"
],
"header": "@@ -48,7 +52,7 @@ the datatype's stream is set using:",
"removed": [
"implements Resetable"
]
},
{
"added": [
" RecordHandle recordToLock) {"
],
"header": "@@ -104,9 +108,7 @@ implements Resetable",
"removed": [
" RecordHandle recordToLock)",
" throws IOException, StandardException",
" {"
]
},
{
"added": [],
"header": "@@ -114,8 +116,6 @@ implements Resetable",
"removed": [
"",
" fillByteHolder();"
]
},
{
"added": [
" // Simplify this code when we can use the Java 1.5 constructor",
" // taking the cause as an argument.",
" IOException ioe = new IOException(se.toString());",
" ioe.initCause(se);",
" throw ioe;"
],
"header": "@@ -156,7 +156,11 @@ implements Resetable",
"removed": [
" throw new IOException(se.toString());"
]
},
{
"added": [],
"header": "@@ -298,9 +302,6 @@ implements Resetable",
"removed": [
"",
" // fill the byte holder",
" fillByteHolder();"
]
},
{
"added": [
"",
" /**************************************************************************",
" * Public Methods of CloneableStream Interface",
" **************************************************************************/",
"",
" /**",
" * Clone this object.",
" * <p>",
" * Creates a deep copy of this object. The returned stream has its own",
" * working buffers and can be initialized, reset and read independently",
" * from this stream.",
" * <p>",
" * The cloned stream is set back to the beginning of stream, no matter",
" * where the current stream happens to be positioned.",
" *",
" * @return Copy of this stream which can be used independently.",
" */",
" public InputStream cloneStream() {",
" OverflowInputStream ret_stream = ",
" new OverflowInputStream(",
" bh.cloneEmpty(),",
" owner, ",
" firstOverflowPage, ",
" firstOverflowId, ",
" recordToLock);",
"",
" return(ret_stream);",
" }"
],
"header": "@@ -314,4 +315,32 @@ implements Resetable",
"removed": []
}
]
}
] |
derby-DERBY-4520-55bc97fc
|
DERBY-4477 Selecting / projecting a column whose value is represented by a stream more than once fails
Patch derby-4477-lowmem-2, which adds test cases to check that lobs
are not materialized when large, for the use cases covered by this
issue. The test cases are added to the lowmem suite, which is not
part of the regular suites.All. This commit is preparatory in that the
lobs are still small, so these changes should be revisited to change
their sizes when the cloning handles materialization properly, cf
DERBY-3650 and DERBY-4520.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@904538 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/testing/org/apache/derbyTesting/functionTests/util/streams/LoopingAlphabetReader.java",
"hunks": [
{
"added": [
" /**",
" * Reopen the stream.",
" */",
" public void reopen()",
" throws IOException {",
" this.closed = false;",
" reset();",
" }",
""
],
"header": "@@ -176,6 +176,15 @@ public class LoopingAlphabetReader",
"removed": []
}
]
}
] |
derby-DERBY-4520-854dd109
|
DERBY-4520 (partial): Refactor and extend data type cloning facilities
Renamed getClone to cloneValue and added the boolean argument 'forceMaterialization', whose default has been set to false.
Removed special handling of RowLocation in ValueRow.getNewNullRow.
Made constructors used for cloning private in SQLSmallInt and SQLTinyInt.
Added argument in constructor used for cloning in XML (whether to force materialization or not).
Removed some unused imports.
Note that the actual cloning-methods haven't been changed yet (expect changes in SQLBlob and SQLClob in a follow-up patch).
Patch file: derby-4520-5a-getClone_renamed_cloneValue.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@906595 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/types/SQLBinary.java",
"hunks": [
{
"added": [
" return cloneValue(false);"
],
"header": "@@ -616,7 +616,7 @@ abstract class SQLBinary",
"removed": [
" return getClone();"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLBoolean.java",
"hunks": [
{
"added": [
"\t/** @see DataValueDescriptor#cloneValue */",
"\tpublic DataValueDescriptor cloneValue(boolean forceMaterialization)"
],
"header": "@@ -325,8 +325,8 @@ public final class SQLBoolean",
"removed": [
"\t/** @see DataValueDescriptor#getClone */",
"\tpublic DataValueDescriptor getClone()"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLChar.java",
"hunks": [
{
"added": [
" * difference of this method from cloneValue is this method does not",
" if ((stream == null) && (_clobValue == null)) {",
" return cloneValue(false);",
" }"
],
"header": "@@ -1316,11 +1316,13 @@ readingLoop:",
"removed": [
" * difference of this method from getClone is this method does not ",
" if ((stream == null) && (_clobValue == null)) { return getClone(); }"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLDate.java",
"hunks": [
{
"added": [
"\t/** @see DataValueDescriptor#cloneValue */",
"\tpublic DataValueDescriptor cloneValue(boolean forceMaterialization)"
],
"header": "@@ -244,8 +244,8 @@ public final class SQLDate extends DataType",
"removed": [
"\t/** @see DataValueDescriptor#getClone */",
"\tpublic DataValueDescriptor getClone()"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLLongint.java",
"hunks": [
{
"added": [
"\t/** @see DataValueDescriptor#cloneValue */",
"\tpublic DataValueDescriptor cloneValue(boolean forceMaterialization)"
],
"header": "@@ -237,8 +237,8 @@ public final class SQLLongint",
"removed": [
"\t/** @see DataValueDescriptor#getClone */",
"\tpublic DataValueDescriptor getClone()"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLRef.java",
"hunks": [
{
"added": [
" /** @see DataValueDescriptor#cloneValue */",
" public DataValueDescriptor cloneValue(boolean forceMaterialization)"
],
"header": "@@ -188,8 +188,8 @@ public class SQLRef extends DataType implements RefDataValue",
"removed": [
"\t/** @see DataValueDescriptor#getClone */",
"\tpublic DataValueDescriptor getClone()"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLSmallint.java",
"hunks": [
{
"added": [
"\t/** @see DataValueDescriptor#cloneValue */",
"\tpublic DataValueDescriptor cloneValue(boolean forceMaterialization)"
],
"header": "@@ -252,8 +252,8 @@ public final class SQLSmallint",
"removed": [
"\t/** @see DataValueDescriptor#getClone */",
"\tpublic DataValueDescriptor getClone()"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLTimestamp.java",
"hunks": [
{
"added": [
"\t/** @see DataValueDescriptor#cloneValue */",
"\tpublic DataValueDescriptor cloneValue(boolean forceMaterialization)"
],
"header": "@@ -269,8 +269,8 @@ public final class SQLTimestamp extends DataType",
"removed": [
"\t/** @see DataValueDescriptor#getClone */",
"\tpublic DataValueDescriptor getClone()"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLTinyint.java",
"hunks": [
{
"added": [
"\t/* This constructor gets used for the cloneValue() method */",
"\tprivate SQLTinyint(byte val, boolean isnull) {"
],
"header": "@@ -109,9 +109,8 @@ public final class SQLTinyint",
"removed": [
"\t/* This constructor gets used for the getClone() method */",
"\tpublic SQLTinyint(byte val, boolean isnull)",
"\t{"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/XML.java",
"hunks": [
{
"added": [
" * Private constructor used for the {@code cloneValue} method."
],
"header": "@@ -154,7 +154,7 @@ public class XML",
"removed": [
" * Private constructor used for the getClone() method."
]
},
{
"added": [
" * @param materialize whether or not to force materialization of the",
" * underlying source data",
" private XML(SQLChar val, int xmlType, boolean seqWithAttr,",
" boolean materialize) {",
" xmlStringValue = (val == null ? null",
" : (SQLChar)val.cloneValue(materialize));"
],
"header": "@@ -163,10 +163,13 @@ public class XML",
"removed": [
" private XML(SQLChar val, int xmlType, boolean seqWithAttr)",
" {",
" xmlStringValue = (val == null ? null : (SQLChar)val.getClone());"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/GenericParameter.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.types.DataTypeDescriptor;",
"import org.apache.derby.iapi.types.DataValueDescriptor;",
"import org.apache.derby.iapi.types.TypeId;"
],
"header": "@@ -21,36 +21,17 @@",
"removed": [
"import org.apache.derby.iapi.services.loader.ClassInspector;",
"",
"import org.apache.derby.iapi.sql.ParameterValueSet;",
"",
"import org.apache.derby.iapi.types.DataValueDescriptor;",
"import org.apache.derby.iapi.types.BooleanDataValue;",
"import org.apache.derby.iapi.types.BitDataValue;",
"import org.apache.derby.iapi.types.DateTimeDataValue;",
"import org.apache.derby.iapi.types.NumberDataValue;",
"import org.apache.derby.iapi.types.StringDataValue;",
"import org.apache.derby.iapi.types.UserDataValue;",
"import org.apache.derby.iapi.types.TypeId;",
"import org.apache.derby.iapi.types.DataTypeDescriptor;",
"import org.apache.derby.iapi.types.*;",
"",
"",
"import org.apache.derby.iapi.services.sanity.SanityManager;",
"",
"import org.apache.derby.iapi.types.*;",
"import org.apache.derby.iapi.types.*;",
"import java.lang.reflect.Array;",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java",
"hunks": [
{
"added": [],
"header": "@@ -21,7 +21,6 @@",
"removed": [
"import java.sql.SQLException;"
]
},
{
"added": [],
"header": "@@ -34,7 +33,6 @@ import org.apache.derby.catalog.IndexDescriptor;",
"removed": [
"import org.apache.derby.iapi.error.PublicAPI;"
]
},
{
"added": [],
"header": "@@ -44,7 +42,6 @@ import org.apache.derby.iapi.sql.Activation;",
"removed": [
"import org.apache.derby.iapi.sql.conn.ConnectionUtil;"
]
},
{
"added": [],
"header": "@@ -82,7 +79,6 @@ import org.apache.derby.iapi.store.access.TransactionController;",
"removed": [
"import org.apache.derby.iapi.types.StringDataValue;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java",
"hunks": [
{
"added": [
" RowLocation rlClone = (RowLocation) rowLocation.cloneValue(false);"
],
"header": "@@ -186,7 +186,7 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": [
" RowLocation rlClone = (RowLocation) rowLocation.getClone();"
]
},
{
"added": [
" templateColArray[fkInfo.colArray[i] - 1].cloneValue(false);",
" newRowColArray[i] = fkInfo.rowLocation.cloneValue(false);"
],
"header": "@@ -1635,10 +1635,10 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": [
" (templateColArray[fkInfo.colArray[i] - 1]).getClone();",
" newRowColArray[i] = fkInfo.rowLocation.getClone();"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/UpdateResultSet.java",
"hunks": [
{
"added": [
" /* If hash table is not full, we add it in.",
" * The key of the hash entry is the string value of the RowLocation.",
" * If the hash table is full, as the comments above this function",
" * say, we scan forward.",
" *",
" * Need to save a clone because when we get cached currentRow, \"rl\"",
" * shares the same reference, so is changed at the same time.",
" */",
" RowLocation updatedRL = (RowLocation) rl.cloneValue(false);"
],
"header": "@@ -689,14 +689,15 @@ class UpdateResultSet extends DMLWriteResultSet",
"removed": [
"\t\t\t/* If hash table is not full, we add it in. The key of the hash entry",
"\t\t\t * is the string value of the RowLocation. If the hash table is full,",
"\t\t\t * as the comments above this function say, we scan forward.",
"\t\t\t *",
"\t\t\t * Need to save a clone because when we get cached currentRow, \"rl\" shares the",
"\t\t\t * same reference, so is changed at the same time.",
"\t\t\t */",
"\t\t\tRowLocation updatedRL = (RowLocation) rl.getClone();"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/ValueRow.java",
"hunks": [
{
"added": [
" rowClone.setColumn(colCtr +1, column[colCtr].cloneValue(false));"
],
"header": "@@ -127,7 +127,7 @@ public class ValueRow implements ExecRow",
"removed": [
"\t\t\t\trowClone.setColumn(colCtr + 1, column[colCtr].getClone());"
]
},
{
"added": [
" rowClone.setColumn(colCtr + 1, column[colCtr].getNewNull());"
],
"header": "@@ -145,22 +145,7 @@ public class ValueRow implements ExecRow",
"removed": [
"\t\t\t\tif (column[colCtr] instanceof RowLocation)",
"\t\t\t\t{",
"\t\t\t\t\t/*",
"\t\t\t\t\t** The getClone() method for a RowLocation has the same",
"\t\t\t\t\t** name as for DataValueDescriptor, but it's on a different",
"\t\t\t\t\t** interface, so the cast must be different.",
"\t\t\t\t\t**",
"\t\t\t\t\t*/",
"\t\t\t\t\trowClone.setColumn(colCtr + 1, column[colCtr].getClone());",
"\t\t\t\t}",
"\t\t\t\telse",
"\t\t\t\t{",
"\t\t\t\t\t// otherwise, get a new null",
"\t\t\t\t\trowClone.setColumn(colCtr + 1,",
"\t\t\t\t\t\t((DataValueDescriptor) (column[colCtr])).getNewNull());",
"\t\t\t\t}"
]
},
{
"added": [
" return column[columnPosition -1].cloneValue(false);"
],
"header": "@@ -184,7 +169,7 @@ public class ValueRow implements ExecRow",
"removed": [
"\t\treturn column[columnPosition -1].getClone();"
]
}
]
},
{
"file": "java/testing/org/apache/derbyTesting/unitTests/store/T_AccessRow.java",
"hunks": [
{
"added": [
"\t\t\tretval[index] = column[index].cloneValue(false);"
],
"header": "@@ -129,7 +129,7 @@ public class T_AccessRow",
"removed": [
"\t\t\tretval[index] = column[index].getClone();"
]
}
]
}
] |
derby-DERBY-4520-9902f436
|
DERBY-4520: Refactor and extend data type cloning facilities
Removed unhelpful assert statements.
Added @see tags to copy JavaDoc from the interfaces.
Patch file: n/a
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@902798 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/services/io/FormatIdInputStream.java",
"hunks": [
{
"added": [
" /** @see Resetable#resetStream() */",
" /** @see Resetable#initStream() */",
" /** @see Resetable#closeStream() */",
" /** @see CloneableStream#cloneStream() */"
],
"header": "@@ -209,37 +209,32 @@ public final class FormatIdInputStream extends DataInputStream",
"removed": [
" if (SanityManager.DEBUG)",
" SanityManager.ASSERT(in instanceof Resetable);",
" if (SanityManager.DEBUG)",
" SanityManager.ASSERT(in instanceof Resetable);",
" if (SanityManager.DEBUG)",
" SanityManager.ASSERT(in instanceof Resetable);",
" if (SanityManager.DEBUG)",
" SanityManager.ASSERT(in instanceof CloneableStream);",
""
]
}
]
}
] |
derby-DERBY-4520-a8bc2170
|
DERBY-4520 (partial): Refactor and extend data type cloning facilities
RowLocation doesn't need to be cloned using CloneableObject.cloneObject,
clone using DataValueDescriptor.getClone instead.
This is the first step in the process to remove CloneableObject completely.
Patch file: derby-4520-1a-RowLocation_cloning.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@901642 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/types/RowLocation.java",
"hunks": [
{
"added": [],
"header": "@@ -21,7 +21,6 @@",
"removed": [
"import org.apache.derby.iapi.types.DataValueDescriptor;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java",
"hunks": [
{
"added": [
" RowLocation rlClone = (RowLocation) rowLocation.getClone();"
],
"header": "@@ -186,7 +186,7 @@ class InsertResultSet extends DMLWriteResultSet implements TargetResultSet",
"removed": [
"\t\t\tRowLocation rlClone = (RowLocation) rowLocation.cloneObject();"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/store/access/heap/HeapRowLocation.java",
"hunks": [
{
"added": [],
"header": "@@ -101,15 +101,6 @@ public class HeapRowLocation extends DataType implements RowLocation",
"removed": [
"\t/*",
"\t** Methods of CloneableObject.",
"\t*/",
"\tpublic Object cloneObject()",
"\t{",
"\t\treturn getClone();",
"\t\t",
"\t}",
""
]
}
]
}
] |
derby-DERBY-4520-b4e2eb78
|
DERBY-4520 (partial): Refactor and extend data type cloning facilities
Moved the method cloneObject into DataValueDescriptor, removed the
CloneableObject interface.
Changed cloneObject return type from Object to DataValueDescriptor.
Removed some unused imports.
Rewrote a few comments.
Patch file: derby-4520-2a-remove_CloneableObject_iface.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@902050 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/store/access/BackingStoreHashtable.java",
"hunks": [
{
"added": [],
"header": "@@ -21,13 +21,8 @@",
"removed": [
"import org.apache.derby.iapi.services.sanity.SanityManager;",
"",
"import org.apache.derby.iapi.services.io.Storable;",
"",
"import org.apache.derby.iapi.types.CloneableObject;"
]
},
{
"added": [
" // History: We used to materialize streams when getting a clone",
" // here (i.e. used getClone, not cloneObject). We still do.",
" // Beetle 4896."
],
"header": "@@ -354,8 +349,9 @@ public class BackingStoreHashtable",
"removed": [
"\t\t// the only difference between getClone and cloneObject is cloneObject does",
"\t\t// not objectify a stream. We use getClone here. Beetle 4896."
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/DataType.java",
"hunks": [
{
"added": [],
"header": "@@ -24,7 +24,6 @@ package org.apache.derby.iapi.types;",
"removed": [
"import org.apache.derby.iapi.types.CloneableObject;"
]
},
{
"added": [],
"header": "@@ -35,7 +34,6 @@ import org.apache.derby.iapi.services.sanity.SanityManager;",
"removed": [
"import java.lang.Comparable;"
]
},
{
"added": [
" implements DataValueDescriptor, Comparable"
],
"header": "@@ -65,7 +63,7 @@ import java.util.Calendar;",
"removed": [
"\timplements DataValueDescriptor, CloneableObject, Comparable"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/BasicSortObserver.java",
"hunks": [
{
"added": [],
"header": "@@ -22,11 +22,6 @@",
"removed": [
"import org.apache.derby.iapi.services.io.Storable;",
"",
"import org.apache.derby.iapi.types.CloneableObject;",
"",
"import org.apache.derby.iapi.services.sanity.SanityManager;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/TemporaryRowHolderImpl.java",
"hunks": [
{
"added": [],
"header": "@@ -25,22 +25,18 @@ import org.apache.derby.iapi.services.sanity.SanityManager;",
"removed": [
"import org.apache.derby.iapi.sql.execute.ExecutionFactory;",
"import org.apache.derby.iapi.sql.ResultDescription;",
"import org.apache.derby.iapi.types.CloneableObject;",
"import org.apache.derby.iapi.services.io.FormatableBitSet;"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/store/access/sort/MergeSort.java",
"hunks": [
{
"added": [],
"header": "@@ -30,7 +30,6 @@ import org.apache.derby.iapi.error.StandardException;",
"removed": [
"import org.apache.derby.iapi.types.CloneableObject;"
]
}
]
},
{
"file": "java/testing/org/apache/derbyTesting/unitTests/store/T_AccessRow.java",
"hunks": [
{
"added": [],
"header": "@@ -27,8 +27,6 @@ import org.apache.derby.iapi.services.io.Storable;",
"removed": [
"import org.apache.derby.iapi.types.CloneableObject;",
""
]
}
]
}
] |
derby-DERBY-4520-d7aa7617
|
DERBY-4520: Refactor and extend data type cloning facilities
Renamed cloneObject to cloneHolder.
Restructured SQLBinary.cloneHolder (behavior unchanged).
Brushed up some comments.
Patch file: derby-4520-4a-cloneObject_renamed_cloneHolder.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@903150 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/execute/TemporaryRowHolderImpl.java",
"hunks": [
{
"added": [
" /* Avoid materializing a stream just because it goes through a temp table.",
" * It is OK to have a stream in the temp table (in memory or spilled to",
" * disk). The assumption is that one stream does not appear in two rows.",
" * For \"update\", one stream can be in two rows and the materialization is",
" * done in UpdateResultSet. Note to future users of this class who may",
" * insert a stream into this temp holder:",
" * (1) As mentioned above, one un-materialized stream can't appear in two",
" * rows; you need to objectify it first otherwise.",
" * (2) If you need to retrieve an un-materialized stream more than once",
" * from the temp holder, you need to either materialize the stream",
" * the first time, or, if there's a memory constraint, in the first",
" * time create a RememberBytesInputStream with the byte holder being",
" * BackingStoreByteHolder, finish it, and reset it after usage.",
" * A third option is to create a stream clone, but this requires that",
" * the container handles are kept open until the streams have been",
" * drained.",
" *",
" * Beetle 4896.",
" */"
],
"header": "@@ -169,18 +169,25 @@ class TemporaryRowHolderImpl implements TemporaryRowHolder",
"removed": [
"\t/* Avoid materializing a stream just because it goes through a temp table. It is OK to",
"\t * have a stream in the temp table (in memory or spilled to disk). The assumption is",
"\t * that one stream does not appear in two rows. For \"update\", one stream can be in two",
"\t * rows and the materialization is done in UpdateResultSet. Note to future users of this",
"\t * class who may insert a stream into this temp holder: (1) As mentioned above, one",
"\t * un-materialized stream can't appear in two rows; you need to objectify it first otherwise.",
"\t * (2) If you need to retrieve a un-materialized stream more than once from the temp holder,",
"\t * you need to either materialize the stream the first time, or, if there's a memory constraint,",
"\t * in the first time create a RememberBytesInputStream with the byte holder being",
"\t * BackingStoreByteHolder, finish it, and reset it after usage.",
"\t * beetle 4896.",
"\t */"
]
},
{
"added": [
" cloned.setColumn(i + 1, cols[i].cloneHolder());"
],
"header": "@@ -191,7 +198,7 @@ class TemporaryRowHolderImpl implements TemporaryRowHolder",
"removed": [
" cloned.setColumn(i + 1, cols[i].cloneObject());"
]
}
]
}
] |
derby-DERBY-4524-9556e964
|
DERBY-4524: Incorrect DROP FUNCTION logic in AccessTest's tearDown() method
Removed the tearDown() method since it did not do anything (DROP
FUNCTION would always fail with no connection) and was not
needed (a CleanDatabaseTestSetup decorator already took care of
dropping the function).
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@902288 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4526-db26d0a6
|
DERBY-5445 (Enhance existing concurrency test to stress sequence generators to also stress identity columns)
DERBY-4565 added a concurrency test to stress sequence generation. I am making simple modifications to that test to add identity column stress testing. Based on a command line parameter, the test will either do sequence generation testing or identity column testing. If no parameter is specified, it will default to doing sequence generation testing.
The test already takes number of parameters. One of those parameters is load options parameter. Load option parameter is indicated by -load_opts on command line and it is followed by a comma separated list of sub-parameters. An eg of load option parameter is as follows
-load_opts debugging=1,numberOfGenerators=5,tablesPerGenerator=10,insertsPerTransaction=100
I am adding another pair to the comma separated sub-parameters,namely identityTest=aNumber. If identityTest is 1, then the test will do identity column stress testing. For any other value for identityTest, the test will do sequence generation testing. If the user doesn't specify identityTest in load options, the test will perform sequence generation testing.
eg of asking the test to do identity column testing
java org.apache.derbyTesting.perf.clients.Runner -driver org.apache.derby.jdbc.EmbeddedDriver -init -load seq_gen -load_opts debugging=1,numberOfGenerators=5,tablesPerGenerator=10,insertsPerTransaction=100,identityTest=1 -gen b2b -threads 10
Two possible way of asking the test to do sequence generation testing(identityTest set to a value other than 1 or identityTest is not specified)
java org.apache.derbyTesting.perf.clients.Runner -driver org.apache.derby.jdbc.EmbeddedDriver -init -load seq_gen -load_opts debugging=1,numberOfGenerators=5,tablesPerGenerator=10,insertsPerTransaction=100,identityTest=2 -gen b2b -threads 10
OR
java org.apache.derbyTesting.perf.clients.Runner -driver org.apache.derby.jdbc.EmbeddedDriver -init -load seq_gen -load_opts debugging=1,numberOfGenerators=5,tablesPerGenerator=10,insertsPerTransaction=100 -gen b2b -threads 10
When I run the test for identity columns, I can consistently see it running into derby lock time out with nested sequence contention error while trying to get current identity value and advancing(this is what we want to achieve from the test ie that it is able to stress the functionality enough to run into contention while trying to get next range for identity columns.) Additionally, there are some lock time out errors raised by store while trying to update system catalog(this is expected too because of multiple threads simultaneously trying to do inserts into a table with identity column). I also in my codeline reverted to changes before DERBY-5426 (DERBY-4526 is Improve the error raised by too much contention on a sequence/identity.) was fixed and saw sequence contention errors (without the lock time out error encapsulation).
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1179374 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/testing/org/apache/derbyTesting/perf/clients/Runner.java",
"hunks": [
{
"added": [
" System.out.print(\"initializing database for \");",
" System.out.println((Runner.getLoadOpt( \"identityTest\", 0 ) == 1)?",
" \t\t\t\t\"identity column testing...\":",
" \t\t\t\t\"sequence generator testing...\");"
],
"header": "@@ -93,7 +93,10 @@ public class Runner {",
"removed": [
" System.out.println(\"initializing database...\");"
]
},
{
"added": [
"\" - identityTest: 1 means do identity column testing, any other number \\n\" +",
"\" means do sequence generator testing. If no identityTest is specified \\n\" +",
"\" then sequence generator testing will be done by default \\n\" +"
],
"header": "@@ -250,6 +253,9 @@ public class Runner {",
"removed": []
}
]
},
{
"file": "java/testing/org/apache/derbyTesting/perf/clients/SequenceGeneratorConcurrency.java",
"hunks": [
{
"added": [
" * Machinery to test the concurrency of sequence/identity generators."
],
"header": "@@ -32,7 +32,7 @@ import java.util.Random;",
"removed": [
" * Machinery to test the concurrency of sequence generators."
]
},
{
"added": [
" private boolean _runIdentityTest;"
],
"header": "@@ -53,6 +53,7 @@ public class SequenceGeneratorConcurrency",
"removed": []
},
{
"added": [
" //If no identityTest is specified, then do sequence testing.",
" _runIdentityTest = ( Runner.getLoadOpt( \"identityTest\", 0 ) == 1);"
],
"header": "@@ -60,6 +61,8 @@ public class SequenceGeneratorConcurrency",
"removed": []
},
{
"added": [
" /** Return whether we are doing identity column testing */",
" public boolean runIdentityTest() { return _runIdentityTest; }",
""
],
"header": "@@ -75,6 +78,9 @@ public class SequenceGeneratorConcurrency",
"removed": []
},
{
"added": [
" buffer.append( \", identityTest = \" + _runIdentityTest );"
],
"header": "@@ -83,6 +89,7 @@ public class SequenceGeneratorConcurrency",
"removed": []
},
{
"added": [
" boolean runIdentityTest = _loadOptions.runIdentityTest();",
" \tif (!runIdentityTest)",
" runDDL( conn, \"create sequence \" + makeSequenceName( sequence ) );",
" \tif (runIdentityTest)",
" runDDL( conn, \"create table \" + makeTableName( sequence, table ) + \"( a int, b int generated always as identity)\" );",
" \telse",
" runDDL( conn, \"create table \" + makeTableName( sequence, table ) + \"( a int )\" );"
],
"header": "@@ -115,14 +122,19 @@ public class SequenceGeneratorConcurrency",
"removed": [
" runDDL( conn, \"create sequence \" + makeSequenceName( sequence ) );",
" runDDL( conn, \"create table \" + makeTableName( sequence, table ) + \"( a int )\" );"
]
},
{
"added": [
" boolean runIdentityTest = _loadOptions.runIdentityTest();"
],
"header": "@@ -183,6 +195,7 @@ public class SequenceGeneratorConcurrency",
"removed": []
},
{
"added": [
" if ( table == 0 ){",
" \tif(!runIdentityTest) ",
" \tps = prepareStatement( _conn, debugging, valuesClause );",
" \telse",
" \tps = prepareStatement( _conn, debugging, \"values (1)\" );",
" }",
" else { ",
" \tif(!runIdentityTest) ",
" ps = prepareStatement( _conn, debugging, \"insert into \" + tableName + \"( a ) \" + valuesClause ); ",
" \telse",
" \tps = prepareStatement( _conn, debugging, \"insert into \" + tableName + \"( a ) values(1)\"); ",
" \t}"
],
"header": "@@ -195,8 +208,18 @@ public class SequenceGeneratorConcurrency",
"removed": [
" if ( table == 0 ) { ps = prepareStatement( _conn, debugging, valuesClause ); }",
" else { ps = prepareStatement( _conn, debugging, \"insert into \" + tableName + \"( a ) \" + valuesClause ); }"
]
}
]
}
] |
derby-DERBY-4527-81d947f3
|
DERBY-4527; create org.apache.derbyTesting.functionTests.harness.ibm17 class
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@904235 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/testing/org/apache/derbyTesting/functionTests/harness/ibm17.java",
"hunks": [
{
"added": [
"/*",
"",
" Derby - Class org.apache.derbyTesting.functionTests.harness.ibm17",
"",
" Licensed to the Apache Software Foundation (ASF) under one or more",
" contributor license agreements. See the NOTICE file distributed with",
" this work for additional information regarding copyright ownership.",
" The ASF licenses this file to You under the Apache License, Version 2.0",
" (the \"License\"); you may not use this file except in compliance with",
" the License. You may obtain a copy of the License at",
"",
" http://www.apache.org/licenses/LICENSE-2.0",
"",
" Unless required by applicable law or agreed to in writing, software",
" distributed under the License is distributed on an \"AS IS\" BASIS,",
" WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
" See the License for the specific language governing permissions and",
" limitations under the License.",
"",
" */",
"",
"package org.apache.derbyTesting.functionTests.harness;",
"",
"import java.util.Vector;",
"import java.util.StringTokenizer;",
"",
"/**",
" <p>This class is for IBM's jdk 1.7.",
"",
" */",
"public class ibm17 extends jvm {",
"",
"\tpublic String getName(){return \"ibm17\";}",
" public ibm17(boolean noasyncgc, boolean verbosegc, boolean noclassgc,",
" long ss, long oss, long ms, long mx, String classpath, String prof,",
" boolean verify, boolean noverify, boolean nojit, Vector D) {",
" super(noasyncgc,verbosegc,noclassgc,ss,oss,ms,mx,classpath,prof,",
"\t\tverify,noverify,nojit,D);",
" }",
" // more typical use:",
" public ibm17(String classpath, Vector D) {",
" super(classpath,D);",
" }",
" // more typical use:",
" public ibm17(long ms, long mx, String classpath, Vector D) {",
" super(ms,mx,classpath,D);",
" }",
" // actual use",
" public ibm17() { }",
"",
" // return the command line to invoke this VM. The caller then adds",
" // the class and program arguments.",
" public Vector getCommandLine()",
" {",
" StringBuffer sb = new StringBuffer();",
" Vector v = super.getCommandLine();",
" appendOtherFlags(sb);",
" String s = sb.toString();",
" StringTokenizer st = new StringTokenizer(s);",
" while (st.hasMoreTokens())",
" {",
" v.addElement(st.nextToken());",
" }",
" return v;",
"\t}",
"",
"\tpublic void appendOtherFlags(StringBuffer sb)",
"\t{",
" if (noasyncgc) warn(\"ibm17 does not support noasyncgc\");",
" if (verbosegc) sb.append(\" -verbose:gc\");",
" if (noclassgc) sb.append(\" -Xnoclassgc\");",
" if (ss>=0) warn(\"ibm17 does not support ss\");",
" if (oss>=0) warn(\"ibm17 does not support oss\");",
" if (ms>=0) {",
" sb.append(\" -ms\");",
" sb.append(ms);",
" }",
" if (mx>=0) {",
" sb.append(\" -mx\");",
" sb.append(mx);",
" }",
" if (classpath!=null) {",
" sb.append(\" -classpath \");",
" sb.append(classpath);",
" }",
" if (prof!=null) warn(\"ibm17 does not support prof\");",
" if (verify) warn(\"ibm17 does not support verify\");",
" if (noverify) warn(\"ibm17 does not support noverify\");",
" if (nojit) sb.append(\" -Djava.compiler=NONE\");",
" if (D != null)",
" for (int i=0; i<D.size();i++) {",
"\t sb.append(\" -D\");",
"\t sb.append((String)(D.elementAt(i)));",
" }",
" }",
"\tpublic String getDintro() { return \"-D\"; }",
"}",
""
],
"header": "@@ -0,0 +1,98 @@",
"removed": []
}
]
}
] |
derby-DERBY-453-48e8f30a
|
DERBY-453 - Second checkin to add BigDecimalHandler
Contributed by Deepa Remesh
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@265043 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/testing/org/apache/derbyTesting/functionTests/util/BigDecimalHandler.java",
"hunks": [
{
"added": [
"/*",
"",
"Derby - Class org.apache.derbyTesting.functionTests.util",
"",
"Copyright 2005 The Apache Software Foundation or its licensors, as applicable.",
"",
"Licensed under the Apache License, Version 2.0 (the \"License\");",
"you may not use this file except in compliance with the License.",
"You may obtain a copy of the License at",
"",
" http://www.apache.org/licenses/LICENSE-2.0",
"",
"Unless required by applicable law or agreed to in writing, software",
"distributed under the License is distributed on an \"AS IS\" BASIS,",
"WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
"See the License for the specific language governing permissions and",
"limitations under the License.",
"",
"*/",
"",
"package org.apache.derbyTesting.functionTests.util;",
"import java.sql.ResultSet;",
"import java.sql.SQLException;",
"",
"/**",
" * BigDecimalHandler provides wrappers for JDBC API methods which use BigDecimal.",
" * When writing tests which use BigDecimal, the methods in this class can be called",
" * instead of directly calling JDBC methods. This way the same test can be used in JVMs ",
" * like J2ME/CDC/Foundation Profile, which do not have BigDecimal class. ",
" * ",
" * * @author deepa",
" *",
" */",
"public class BigDecimalHandler {",
"\t",
"\tstatic int representation;",
"\tstatic final int STRING_REPRESENTATION = 1;",
"\tstatic final int BIGDECIMAL_REPRESENTATION = 2;",
"\t",
"\tstatic{",
"\t\ttry{",
"\t\t\tClass.forName(\"java.math.BigDecimal\");",
"\t\t\trepresentation = BIGDECIMAL_REPRESENTATION;",
"\t\t}",
"\t\tcatch(ClassNotFoundException e){",
"\t\t\t//Used for J2ME/Foundation",
"\t\t\trepresentation = STRING_REPRESENTATION;",
"\t\t}",
"\t}",
"\t",
"\t/** This method is a wrapper for the ResultSet method getBigDecimal(int columnIndex).",
"\t * ",
"\t * @param rs ResultSet ",
"\t * @param columnIndex Column Index ",
"\t * @return String value of getXXX(columnIndex)method on the ResultSet",
"\t * @throws SQLException",
"\t */",
"\tpublic static String getBigDecimalString(ResultSet rs, int columnIndex) throws SQLException{",
"\t\tString bigDecimalString=null;",
"\t\t",
"\t\tswitch(representation){",
"\t\t\tcase BIGDECIMAL_REPRESENTATION:",
"\t\t\t\t//Call toString() only for non-null values, else return null",
"\t\t\t\tif(rs.getBigDecimal(columnIndex) != null)",
"\t\t\t\t\tbigDecimalString = rs.getBigDecimal(columnIndex).toString();",
"\t\t\t\tbreak;",
"\t\t\tcase STRING_REPRESENTATION:",
"\t\t\t\tbigDecimalString = rs.getString(columnIndex);",
"\t\t\t\tif((bigDecimalString != null) && !canConvertToDecimal(rs,columnIndex))",
"\t\t\t\t\tthrow new SQLException(\"Invalid data conversion. Method not called.\");",
"\t\t\t\tbreak;",
"\t\t\tdefault:\t",
"\t\t\t\tnew Exception(\"Failed: Invalid Big Decimal representation\").printStackTrace();",
"\t\t}",
"\t\treturn bigDecimalString;",
"\t}",
"\t",
"\t/** This method is a wrapper for ResultSet method getBigDecimal(String columnName).",
"\t * ",
"\t * @param rs ResultSet",
"\t * @param columnName Column Name",
"\t * @param columnIndex Coulumn Index",
"\t * @return String value of getXXX(columnName)method on the ResultSet",
"\t * @throws SQLException",
"\t */",
"\tpublic static String getBigDecimalString(ResultSet rs, String columnName, int columnIndex) throws SQLException{",
"\t\tString bigDecimalString = null;",
"\t\t\t\t",
"\t\tswitch(representation){",
"\t\t\tcase BIGDECIMAL_REPRESENTATION:",
"\t\t\t\t//Call toString() only for non-null values, else return null",
"\t\t\t\tif(rs.getBigDecimal(columnName) != null){",
"\t\t\t\t\tbigDecimalString = rs.getBigDecimal(columnName).toString();",
"\t\t\t\t}",
"\t\t\t\tbreak;",
"\t\t\tcase STRING_REPRESENTATION:",
"\t\t\t\tbigDecimalString = rs.getString(columnName);",
"\t\t\t\tif((bigDecimalString != null) && !canConvertToDecimal(rs,columnIndex))",
"\t\t\t\t\tthrow new SQLException(\"Invalid data conversion. Method not called.\");",
"\t\t\t\tbreak;",
"\t\t\tdefault:\t",
"\t\t\t\tnew Exception(\"Failed: Invalid Big Decimal representation\").printStackTrace();",
"\t\t}",
"\t\treturn bigDecimalString;",
"\t}",
"\t",
"\t/** This method is a wrapper for ResultSet method getObject(int columnIndex) ",
"\t * ",
"\t * @param rs ResultSet",
"\t * @param columnIndex ColumnIndex",
"\t * @return String value of getXXX(columnIndex) method on the ResultSet",
"\t * @throws SQLException",
"\t */",
"\tpublic static String getObjectString(ResultSet rs, int columnIndex) throws SQLException{",
"\t\tString objectString = null;",
"\t\t",
"\t\tswitch(representation){",
"\t\t\tcase BIGDECIMAL_REPRESENTATION:",
"\t\t\t\t//Call toString() only for non-null values, else return null",
"\t\t\t\tif(rs.getObject(columnIndex) != null)",
"\t\t\t\t\tobjectString = rs.getObject(columnIndex).toString();",
"\t\t\t\tbreak;",
"\t\t\tcase STRING_REPRESENTATION:",
"\t\t\t\tint columnType= rs.getMetaData().getColumnType(columnIndex);",
"\t\t\t\tif(columnType == java.sql.Types.DECIMAL){",
"\t\t\t\t\tobjectString = rs.getString(columnIndex);",
"\t\t\t\t}\t",
"\t\t\t\telse",
"\t\t\t\t\t//Call toString() only for non-null values, else return null",
"\t\t\t\t\tif(rs.getObject(columnIndex) != null)",
"\t\t\t\t\t\tobjectString = rs.getObject(columnIndex).toString();",
"\t\t\t\t\tbreak;",
"\t\t\tdefault:\t",
"\t\t\t\tnew Exception(\"Failed: Invalid Big Decimal representation\").printStackTrace();",
"\t\t}",
"\t\treturn objectString;",
"\t}\t",
"\t",
"\t/** This method is a wrapper for ResultSet method getObject(String columnName)",
"\t * @param rs ResultSet",
"\t * @param columnName Column Name",
"\t * @param columnIndex Column Index",
"\t * @return String value of getXXX(columnName) method on the ResultSet",
"\t * @throws SQLException",
"\t */",
"\tpublic static String getObjectString(ResultSet rs, String columnName, int columnIndex) throws SQLException{",
"\t\tString objectString = null;",
"\t\t\t\t",
"\t\tswitch(representation){",
"\t\t\tcase BIGDECIMAL_REPRESENTATION:",
"\t\t\t\t//Call toString() only for non-null values, else return null",
"\t\t\t\tif(rs.getObject(columnName) != null)",
"\t\t\t\t\tobjectString = rs.getObject(columnName).toString();",
"\t\t\t\tbreak;",
"\t\t\tcase STRING_REPRESENTATION:",
"\t\t\t\tint columnType= rs.getMetaData().getColumnType(columnIndex);",
"\t\t\t\tif(columnType == java.sql.Types.DECIMAL){",
"\t\t\t\t\tobjectString = rs.getString(columnName);",
"\t\t\t\t}\t",
"\t\t\t\telse",
"\t\t\t\t\t//Call toString() only for non-null values, else return null\t\t\t\t\t",
"\t\t\t\t\tif(rs.getObject(columnName) != null)",
"\t\t\t\t\t\tobjectString = rs.getObject(columnName).toString();",
"\t\t\t\t\tbreak;",
"\t\t\tdefault:\t",
"\t\t\t\tnew Exception(\"Failed: Invalid Big Decimal representation\").printStackTrace();",
"\t\t}",
"\t\treturn objectString;",
"\t}",
"\t",
"\t/** This method checks that the SQL type can be converted to Decimal",
"\t * ",
"\t * @param rs ResultSet",
"\t * @param columnIndex Column Index",
"\t * @return true if the SQL type is convertible to DECIMAL, false otherwise.",
"\t * @throws SQLException",
"\t */",
"\tprotected static boolean canConvertToDecimal(ResultSet rs,int columnIndex) throws SQLException{",
"\t\tint columnType= rs.getMetaData().getColumnType(columnIndex);",
"\t\tif(columnType == java.sql.Types.BIGINT || ",
"\t\t columnType == java.sql.Types.DECIMAL || ",
"\t\t columnType == java.sql.Types.DOUBLE || ",
"\t\t columnType == java.sql.Types.FLOAT || ",
"\t\t columnType == java.sql.Types.INTEGER || ",
"\t\t columnType == java.sql.Types.NUMERIC || ",
"\t\t columnType == java.sql.Types.REAL || ",
"\t\t columnType == java.sql.Types.SMALLINT || ",
"\t\t columnType == java.sql.Types.TINYINT){",
"\t\t\treturn true;",
"\t\t}",
"\t\treturn false;",
"\t}",
"\t",
"}"
],
"header": "@@ -0,0 +1,194 @@",
"removed": []
}
]
}
] |
derby-DERBY-4531-188fc76d
|
DERBY-4531 Client setCharacterStream closes its Reader argument stream in finalizer
Patch derby-4531b, which makes the client behave like embedded in this
respect.
It also adds a new test which shows the clients divergent behavior
prior to the fix in EncodedInputStream. Since the test relies on
explicit gc, it is not guaranteed to show the presence of the bug,
though.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@987331 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4531-51826c3c
|
DERBY-4477 Selecting / projecting a column whose value is represented by a stream more than once fails
Follow-up patch for an intermittet bug caused by previous insert, due
to newly filed DERBY-4531: derby-4477-lowmem-followup.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@905621 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/testing/org/apache/derbyTesting/functionTests/util/streams/LoopingAlphabetReader.java",
"hunks": [
{
"added": [],
"header": "@@ -176,15 +176,6 @@ public class LoopingAlphabetReader",
"removed": [
" /**",
" * Reopen the stream.",
" */",
" public void reopen()",
" throws IOException {",
" this.closed = false;",
" reset();",
" }",
""
]
}
]
}
] |
derby-DERBY-4531-a1b56d7d
|
DERBY-4531: Client setCharacterStream closes its Reader argument stream in finalizer
Added a work-around for the issue where the SQLState differs depending on
whether the finalizer has been run or not.
Can be removed when DERBY-4531 has been fixed properly.
Patch file: derby-4531-1a-test_workaround.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@938972 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4538-f246c9c6
|
DERBY-4538
Committing patch DERBY4538_NoReferencingClause_diff_v2.txt attached to jira DERBY-4538(removed redundant ; as pointed by Knut).
This changes the UPDATE and DELETE statement codes to be little bit smarter when they decide what columns should be part of the read map. Currently, as soon as these 2 nodes find that there are relevant triggers on the table, we decide to read all the columns from the table. With the patch, we will check if all the relevant tiggers have missing REFERENCING clause. If yes, then do not need to read all the columns. Just the columns needed by the UPDATE/DELETE statement. This will get rid of OOM we run into when the table has LOB columns BUT only in the case when the UPDATE/DELETE statement does not reference the LOB column and all the triggers defined on them have missing REFERENCING clause. I have enabled the TriggerTests in lowmem suite with the missing REFERENCING clause cases enabled. For all the other test cases, I simply return from those test cases without actually testing it because we do not have fix for those cases yet. The lowmem suite does not get run regularly and when it is run, as the name indicates, it runs with limited heap. I wanted us to be able to run these tests with default heap as well. To achieve that, I am including the TriggerTests in lang suite too.
The INSERT table with INSERT triggers work fine already without my changes as long as the INSERT statement does not reference the LOB column.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@917771 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/sql/compile/DeleteNode.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.dictionary.TriggerDescriptor;"
],
"header": "@@ -33,6 +33,7 @@ import org.apache.derby.iapi.sql.dictionary.TableDescriptor;",
"removed": []
},
{
"added": [
"",
"import java.util.Enumeration;"
],
"header": "@@ -70,6 +71,8 @@ import org.apache.derby.impl.sql.execute.FKInfo;",
"removed": []
},
{
"added": [
"\t *\t5)\tif there are any DELETE triggers, then do one of the following",
"\t * a)If all of the triggers have MISSING referencing clause, then that",
"\t * means that the trigger actions do not have access to before and",
"\t * after values. In that case, there is no need to blanketly decide ",
"\t * to include all the columns in the read map just because there are",
"\t * triggers defined on the table.",
"\t * b)Since one/more triggers have REFERENCING clause on them, get all",
"\t * the columns because we don't know what the user will ultimately ",
"\t * reference."
],
"header": "@@ -908,7 +911,15 @@ public class DeleteNode extends DMLModStatementNode",
"removed": [
"\t *\t5)\tif there are any DELETE triggers, marks all columns in the bitmap"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/UpdateNode.java",
"hunks": [
{
"added": [],
"header": "@@ -24,25 +24,15 @@ package\torg.apache.derby.impl.sql.compile;",
"removed": [
"import org.apache.derby.iapi.services.context.ContextManager;",
"",
"import org.apache.derby.iapi.services.loader.GeneratedMethod;",
"",
"import org.apache.derby.iapi.services.compiler.MethodBuilder;",
"import org.apache.derby.iapi.sql.compile.CompilerContext;",
"import org.apache.derby.iapi.sql.compile.Visitable;",
"import org.apache.derby.iapi.sql.compile.Visitor;",
"",
"import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;"
]
},
{
"added": [
"import org.apache.derby.iapi.sql.dictionary.TriggerDescriptor;"
],
"header": "@@ -51,9 +41,9 @@ import org.apache.derby.iapi.sql.dictionary.ConstraintDescriptorList;",
"removed": [
"import org.apache.derby.iapi.sql.dictionary.IndexRowGenerator;"
]
},
{
"added": [],
"header": "@@ -61,7 +51,6 @@ import org.apache.derby.iapi.sql.execute.CursorResultSet;",
"removed": [
"import org.apache.derby.iapi.sql.Activation;"
]
},
{
"added": [
"import java.util.Enumeration;"
],
"header": "@@ -77,11 +66,8 @@ import org.apache.derby.iapi.util.ReuseFactory;",
"removed": [
"import java.sql.SQLException;",
"import java.util.ArrayList;",
"import java.util.Arrays;",
"import java.util.Properties;"
]
},
{
"added": [
"\t *\t6)\tif there are any UPDATE triggers, then do one of the following",
"\t * a)If all of the triggers have MISSING referencing clause, then that",
"\t * means that the trigger actions do not have access to before and",
"\t * after values. In that case, there is no need to blanketly decide ",
"\t * to include all the columns in the read map just because there are",
"\t * triggers defined on the table.",
"\t * b)Since one/more triggers have REFERENCING clause on them, get all",
"\t * the columns because we don't know what the user will ultimately ",
"\t * reference."
],
"header": "@@ -952,7 +938,15 @@ public final class UpdateNode extends DMLModStatementNode",
"removed": [
"\t *\t6)\tif there are any triggers, marks all columns in the bitmap"
]
}
]
}
] |
derby-DERBY-4539-3b77ab83
|
DERBY-4539: Make it possible to state the optional clauses of the CREATE SEQUENCE statement in an order.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@909633 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4540-c2118336
|
DERBY-4540 AssertionFailedError in store.AccessTest.testCS4595B_UniqueIndex(AccessTest.java:1729)' on SUSE Linux / IBM JIT - r9
This change just adds some diagnostic print out to the assert that seems
to be intermittently failing. Next time we should see the number of pages
expected and the full dump of the statistics output the test is getting when
it gets this failure.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1233098 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4542-b173580e
|
DERBY-4542: Avoid materialization where possible when cloning CollatorSQLClob
Made CollatorSQLClob able to clone itself without materializing the value
(doesn't apply in all cases, sometimes we are forced to materialize because
it is requested to do so, or because we don't know how to clone the underlying
value).
Patch file: derby-4542-1a-avoid_materialization.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@910200 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/types/CollatorSQLClob.java",
"hunks": [
{
"added": [
" if (forceMaterialization) {",
" try {",
" return new CollatorSQLClob(getString(),",
" holderForCollationSensitiveInfo.getCollatorForCollation());",
" }",
" catch (StandardException se)",
" {",
" if (SanityManager.DEBUG)",
" SanityManager.THROWASSERT(\"Unexpected exception\", se);",
" return null;",
" }",
" } else {",
" // If materialization isn't forced, let SQLClob (super) decide how",
" // to clone the value.",
" SQLClob clob = (SQLClob)super.cloneValue(forceMaterialization);",
" CollatorSQLClob clone = new CollatorSQLClob(",
" holderForCollationSensitiveInfo.getCollatorForCollation());",
" clone.copyState(clob);",
" return clone;",
" }"
],
"header": "@@ -109,17 +109,26 @@ class CollatorSQLClob extends SQLClob implements CollationElementsInterface",
"removed": [
"\t\ttry",
"\t\t{",
"\t\t\treturn new CollatorSQLClob(getString(), ",
"\t\t\t\t\tholderForCollationSensitiveInfo.getCollatorForCollation());",
"\t\t}",
"\t\tcatch (StandardException se)",
"\t\t{",
"\t\t\tif (SanityManager.DEBUG)",
"\t\t\t\tSanityManager.THROWASSERT(\"Unexpected exception\", se);",
"\t\t\treturn null;",
"\t\t}"
]
}
]
}
] |
derby-DERBY-4543-d92fa8c6
|
DERBY-4543: CLOB values incorrectly stored on disk using the old header format
Aadded missing !-operator in EmbedPreparedStatement.
Replaced DD_VERSION_CURRENT with DD_VERSION_DERBY_10_5.
Renamed several methods and variables (stop using soft-upgrade mode term, use stream header format instead).
Patch file: derby-4543-1a-clob_hdr_format_fix.diff
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@908418 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/iapi/types/ClobStreamHeaderGenerator.java",
"hunks": [
{
"added": [
" * which header format should be used. This is currently only determined by",
" * consulting the data dictionary about the version."
],
"header": "@@ -56,7 +56,8 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" * whether the database is being accessed in soft upgrade mode or not."
]
},
{
"added": [
" * {@code true} if the database version is prior to 10.5, {@code false} if",
" * the version is 10.5 or newer. If {@code null}, the version will be",
" * determined by obtaining the database context through the context service.",
" private Boolean isPreDerbyTenFive;",
" * which header format to use based on the database version."
],
"header": "@@ -64,15 +65,15 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" * {@code true} if database is being accessed in soft upgrade mode,",
" * {@code false} is not. If {@code null}, the mode will be determined by",
" * obtaining the database context through the context service.",
" private Boolean inSoftUpgradeMode;",
" * if the database is being accessed in soft upgrade mode or not."
]
},
{
"added": [
" * Creates a new generator using the specified header format.",
" * @param isPreDerbyTenFive {@code true} if the database version is prior",
" * to 10.5, {@code false} if the version is 10.5 or newer",
" public ClobStreamHeaderGenerator(boolean isPreDerbyTenFive) {",
" // Do not try to determine the version through the cottext service, use",
" // the specified value instead.",
" this.isPreDerbyTenFive = Boolean.valueOf(isPreDerbyTenFive);",
" * 10.5 (or newer), and a byte count if we are accessing a database created",
" * by a version prior to 10.5.",
" * @return {@code false} if a byte count is expected (prior to 10.5),",
" * {@code true} if a character count is expected (10.5 and newer).",
" if (callbackDVD != null && isPreDerbyTenFive == null) {",
" determineHeaderFormat();",
" // Expect byte count if older than 10.5, char count otherwise.",
" return !isPreDerbyTenFive.booleanValue();"
],
"header": "@@ -84,33 +85,34 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" * Creates a new generator for a database in the given mode.",
" * @param inSoftUpgradeMode {@code true} if the database is being accessed",
" * in soft upgrade mode, {@code false} if not",
" public ClobStreamHeaderGenerator(boolean inSoftUpgradeMode) {",
" // Do not try to determine if we are in soft upgrade mode, use the",
" // specified value for it.",
" this.inSoftUpgradeMode = Boolean.valueOf(inSoftUpgradeMode);",
" * 10.5 (or newer), and a byte count if we are accessing a database in",
" * soft upgrade mode.",
" * @return {@code false} if in soft upgrade mode, {@code true} if not.",
" if (callbackDVD != null && inSoftUpgradeMode == null) {",
" determineMode();",
" // Expect byte count if in soft upgrade mode, char count otherwise.",
" return !inSoftUpgradeMode.booleanValue();"
]
},
{
"added": [
" if (callbackDVD != null && isPreDerbyTenFive == null) {",
" determineHeaderFormat();",
" if (isPreDerbyTenFive == Boolean.FALSE) {"
],
"header": "@@ -123,11 +125,11 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" if (callbackDVD != null && inSoftUpgradeMode == null) {",
" determineMode();",
" if (inSoftUpgradeMode == Boolean.FALSE) {"
]
},
{
"added": [
" if (callbackDVD != null && isPreDerbyTenFive == null) {",
" determineHeaderFormat();",
" if (isPreDerbyTenFive == Boolean.FALSE) {"
],
"header": "@@ -159,11 +161,11 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" if (callbackDVD != null && inSoftUpgradeMode == null) {",
" determineMode();",
" if (inSoftUpgradeMode == Boolean.FALSE) {"
]
},
{
"added": [
" if (callbackDVD != null && isPreDerbyTenFive == null) {",
" determineHeaderFormat();",
" if (!isPreDerbyTenFive.booleanValue()) {"
],
"header": "@@ -195,10 +197,10 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" if (callbackDVD != null && inSoftUpgradeMode == null) {",
" determineMode();",
" if (!inSoftUpgradeMode.booleanValue()) {"
]
},
{
"added": [
" if (callbackDVD != null && isPreDerbyTenFive == null) {",
" determineHeaderFormat();",
" if (!isPreDerbyTenFive.booleanValue()) {"
],
"header": "@@ -221,10 +223,10 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" if (callbackDVD != null && inSoftUpgradeMode == null) {",
" determineMode();",
" if (!inSoftUpgradeMode.booleanValue()) {"
]
},
{
"added": [
" * Determines which header format to use.",
" * <p>",
" * <em>Implementation note:</em> The header format is determined by",
" * consulting the data dictionary throught the context service. If there is",
" * no context, the operation will fail.",
" *",
" * @throws IllegalStateException if there is no context",
" private void determineHeaderFormat() {"
],
"header": "@@ -237,10 +239,15 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" * Determines if the database being accessed is accessed in soft upgrade",
" * mode or not.",
" private void determineMode() {"
]
},
{
"added": [
" isPreDerbyTenFive = Boolean.valueOf(!dd.checkVersion("
],
"header": "@@ -249,7 +256,7 @@ public final class ClobStreamHeaderGenerator",
"removed": [
" inSoftUpgradeMode = Boolean.valueOf(!dd.checkVersion("
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/types/SQLClob.java",
"hunks": [
{
"added": [
" * <ul> <li>{@code setStreamHeaderFormat} has been invoked before the header"
],
"header": "@@ -616,7 +616,7 @@ public class SQLClob",
"removed": [
" * <ul> <li>{@code setSoftUpgradeMode} has been invoked before the header"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/jdbc/EmbedPreparedStatement.java",
"hunks": [
{
"added": [
" * Tells which header format to use when writing CLOBs into the store.",
" * This is lazily set if we need it, and there are currently only two",
" * valid header formats to choose between.",
" * @see #usePreTenFiveHdrFormat()",
" private Boolean usePreTenFiveHdrFormat;"
],
"header": "@@ -93,12 +93,13 @@ public abstract class EmbedPreparedStatement",
"removed": [
" * Tells if we're accessing a database in soft upgrade mode or not.",
" * This is lazily set if we need it.",
" * @see #isSoftUpgraded()",
" private Boolean inSoftUpgradeMode;"
]
},
{
"added": [
" dvd.setStreamHeaderFormat(usePreTenFiveHdrFormat());"
],
"header": "@@ -743,7 +744,7 @@ public abstract class EmbedPreparedStatement",
"removed": [
" dvd.setSoftUpgradeMode(isSoftUpgraded());"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/jdbc/EmbedResultSet.java",
"hunks": [
{
"added": [
" // this, tell the DVD/generator which header format to use.",
" dvd.setStreamHeaderFormat(Boolean.valueOf(",
" checkVersion(DataDictionary.DD_VERSION_DERBY_10_5, null)));"
],
"header": "@@ -2930,11 +2930,10 @@ public abstract class EmbedResultSet extends ConnectionChild",
"removed": [
" // this, tell the DVD/generator whether we are running in soft",
" // upgrade mode or not.",
" dvd.setSoftUpgradeMode(Boolean.valueOf(",
" checkVersion(DataDictionary.DD_VERSION_CURRENT, null)));"
]
}
]
}
] |
derby-DERBY-4544-961ecca2
|
DERBY-4544: Don't use the SQLClob.getLength() optimization on non-resetable streams--this fixes a data corruption.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@1091169 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4545-36461a87
|
DERBY-4545: Fix UDTTest to run cleanly on small devices.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@909123 13f79535-47bb-0310-9956-ffa450edef68
|
[] |
derby-DERBY-4551-1eeea613
|
DERBY-4551 Allow database user to execute stored procedures with same permissions as database owner and/or routine definer
Follow-up patch derby-4551-followup-1b (plus some small hygiene adjustments).
The problem is that the substatement executed as part of
ResultSet.{insertRow, updateRow,deleteRow} pushes a new statement
context. This statement context is consulted when constructing the
activation for the substatement, to see if the activation shall have a
parent activation (which is used to get the correct SQL session
context),
cf. GenericLanguageConnectionContext#getCurrentSQLSessionContext.
However, the newly pushed statement context was missing its parent's
activation, so the substatement instead get the top level session
context, whose current user is not the DEFINER bur rather the session
user. cf BaseActivation#setupSQLSessionContextForChildren, hence the
authorization error.
The patch makes sure the nested statement context initially gets the
(new) parent context set.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@999570 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/impl/jdbc/EmbedResultSet.java",
"hunks": [
{
"added": [
" StatementContext currSC = lcc.getStatementContext();",
" Activation parentAct = null;",
"",
" if (currSC != null) {",
" parentAct = currSC.getActivation();",
" }",
""
],
"header": "@@ -3611,6 +3611,13 @@ public abstract class EmbedResultSet extends ConnectionChild",
"removed": []
},
{
"added": [
"",
" // A priori, the new statement context inherits the activation",
" // of the existing statementContext, so that that activation",
" // ends up as parent of the new activation 'act' created below,",
" // which will be the activation of the pushed statement",
" // context.",
" statementContext.setActivation(parentAct);",
""
],
"header": "@@ -3619,6 +3626,14 @@ public abstract class EmbedResultSet extends ConnectionChild",
"removed": []
},
{
"added": [
" StatementContext currSC = lcc.getStatementContext();",
" Activation parentAct = null;",
"",
" if (currSC != null) {",
" parentAct = currSC.getActivation();",
" }",
"",
"",
" // A priori, the new statement context inherits the activation of",
" // the existing statementContext, so that that activation ends up",
" // as parent of the new activation 'act' created below, which will",
" // be the activation of the pushed statement context.",
" statementContext.setActivation(parentAct);",
""
],
"header": "@@ -3700,8 +3715,22 @@ public abstract class EmbedResultSet extends ConnectionChild",
"removed": []
}
]
}
] |
derby-DERBY-4551-9f601725
|
DERBY-4551 Allow database user to execute stored procedures with same permissions as database owner and/or routine definer
Patch derby-4551-4, which implements the specification attached to
this issue for executing routines with definer's right, and also adds
tests for this feature. A separate issue has been filed for
documentation changes needed.
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@952227 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/engine/org/apache/derby/catalog/types/RoutineAliasInfo.java",
"hunks": [
{
"added": [
" /** Mask for the SECURITY INVOKER/DEFINER field */",
" private static final short SECURITY_DEFINER_MASK = (short) 0x20;",
""
],
"header": "@@ -57,6 +57,9 @@ public class RoutineAliasInfo extends MethodAliasInfo",
"removed": []
},
{
"added": [
" bit 5 on if running with definer's right, off otherwise"
],
"header": "@@ -93,6 +96,7 @@ public class RoutineAliasInfo extends MethodAliasInfo",
"removed": []
},
{
"added": [
" this(methodName,",
" parameterCount,",
" parameterNames,",
" parameterTypes,",
" parameterModes,",
" dynamicResultSets,",
" parameterStyle,",
" sqlAllowed,",
" isDeterministic,",
" false /* definersRights*/,",
" true,",
" (TypeDescriptor) null);",
" public RoutineAliasInfo(String methodName,",
" int parameterCount,",
" String[] parameterNames,",
" TypeDescriptor[] parameterTypes,",
" int[] parameterModes,",
" int dynamicResultSets,",
" short parameterStyle,",
" short sqlAllowed,",
" boolean isDeterministic,",
" boolean definersRights,",
" boolean calledOnNullInput,",
" TypeDescriptor returnType)"
],
"header": "@@ -120,16 +124,35 @@ public class RoutineAliasInfo extends MethodAliasInfo",
"removed": [
"\t\tthis(methodName, parameterCount, parameterNames, parameterTypes, parameterModes, ",
" dynamicResultSets, parameterStyle, sqlAllowed, isDeterministic, true, (TypeDescriptor) null);",
"\tpublic RoutineAliasInfo(String methodName, int parameterCount, String[] parameterNames,",
"\t\tTypeDescriptor[]\tparameterTypes, int[] parameterModes, int dynamicResultSets, short parameterStyle, short sqlAllowed,",
" boolean isDeterministic, boolean calledOnNullInput, TypeDescriptor returnType)"
]
},
{
"added": [
"",
" if (definersRights) {",
" this.sqlOptions = (short) (sqlOptions | SECURITY_DEFINER_MASK);",
" }",
""
],
"header": "@@ -141,6 +164,11 @@ public class RoutineAliasInfo extends MethodAliasInfo",
"removed": []
},
{
"added": [
" public boolean hasDefinersRights()",
" {",
" return ( (sqlOptions & SECURITY_DEFINER_MASK) != 0 );",
" }",
""
],
"header": "@@ -222,6 +250,11 @@ public class RoutineAliasInfo extends MethodAliasInfo",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/conn/LanguageConnectionContext.java",
"hunks": [
{
"added": [
" /**",
" * Get the Authorization Id of the current user",
" *",
" * @param a activation",
" * @return String the authorization id",
" */",
" public String getCurrentUserId(Activation a);",
"",
" /**",
" * Get the Authorization Id of the session user",
" *",
" * @return String the authorization id",
" */",
" public String getSessionUserId();",
""
],
"header": "@@ -418,12 +418,21 @@ public interface LanguageConnectionContext extends Context {",
"removed": [
"\t/**",
"\t *\tGet the Authorization Id",
"\t *",
"\t * @return String\tthe authorization id",
"\t */",
"\tpublic String getAuthorizationId();"
]
},
{
"added": [
" * @param a activation",
" public boolean roleIsSettable(Activation a, String role)",
" throws StandardException;"
],
"header": "@@ -1131,12 +1140,14 @@ public interface LanguageConnectionContext extends Context {",
"removed": [
"\tpublic boolean roleIsSettable(String role) throws StandardException;"
]
},
{
"added": [
" * @param true if the method should run with definer's rights",
" * @param definer authorization id of the definer",
" public void setupNestedSessionContext(Activation a,",
" boolean definersRights,",
" String definer)",
" throws StandardException;"
],
"header": "@@ -1155,8 +1166,13 @@ public interface LanguageConnectionContext extends Context {",
"removed": [
"\tpublic void setupNestedSessionContext(Activation a);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/conn/SQLSessionContext.java",
"hunks": [
{
"added": [],
"header": "@@ -70,7 +70,6 @@ public interface SQLSessionContext {",
"removed": [
" * The empty string is returned if role is NONE."
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/StatementColumnPermission.java",
"hunks": [
{
"added": [],
"header": "@@ -89,7 +89,6 @@ public class StatementColumnPermission extends StatementTablePermission",
"removed": [
"\t\t\t\t\t String authorizationId,"
]
},
{
"added": [
" if (hasPermissionOnTable(lcc, activation, forGrant, ps)) {",
" String currentUserId = lcc.getCurrentUserId(activation);",
""
],
"header": "@@ -97,11 +96,12 @@ public class StatementColumnPermission extends StatementTablePermission",
"removed": [
"\t\tif (hasPermissionOnTable(lcc, activation,",
"\t\t\t\t\t\t\t\t\t authorizationId, forGrant, ps)) {"
]
},
{
"added": [
" currentUserId,"
],
"header": "@@ -111,7 +111,7 @@ public class StatementColumnPermission extends StatementTablePermission",
"removed": [
"\t\t\t\t\t\t\t\t\t\t\t\t\tauthorizationId,"
]
},
{
"added": [
" currentUserId,"
],
"header": "@@ -120,7 +120,7 @@ public class StatementColumnPermission extends StatementTablePermission",
"removed": [
"\t\t\t\t\t\t\t\t\t\t\t\tauthorizationId,"
]
},
{
"added": [
" rd = dd.getRoleGrantDescriptor(role, currentUserId, dbo);"
],
"header": "@@ -162,7 +162,7 @@ public class StatementColumnPermission extends StatementTablePermission",
"removed": [
"\t\t\trd = dd.getRoleGrantDescriptor(role, authorizationId, dbo);"
]
},
{
"added": [
" currentUserId,"
],
"header": "@@ -236,7 +236,7 @@ public class StatementColumnPermission extends StatementTablePermission",
"removed": [
"\t\t\t\t\t authorizationId,"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/StatementPermission.java",
"hunks": [
{
"added": [],
"header": "@@ -48,14 +48,12 @@ public abstract class StatementPermission",
"removed": [
"\t * @param authorizationId\tAuthorizationId",
"\t\t\t\t\t\t\t\tString authorizationId,"
]
},
{
"added": [],
"header": "@@ -108,7 +106,6 @@ public abstract class StatementPermission",
"removed": [
" String authorizationId,"
]
},
{
"added": [
"",
" PermissionsDescriptor perm =",
" getPermissionDescriptor( lcc.getCurrentUserId(activation), dd );"
],
"header": "@@ -117,8 +114,9 @@ public abstract class StatementPermission",
"removed": [
"\t\t",
"\t\tPermissionsDescriptor perm = getPermissionDescriptor( authorizationId, dd );"
]
},
{
"added": [
" (role, lcc.getCurrentUserId(activation), dbo);"
],
"header": "@@ -138,7 +136,7 @@ public abstract class StatementPermission",
"removed": [
"\t\t\t\t(role, authorizationId, dbo);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/StatementRoutinePermission.java",
"hunks": [
{
"added": [
""
],
"header": "@@ -1,3 +1,4 @@",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/StatementSchemaPermission.java",
"hunks": [
{
"added": [
" String currentUserId = lcc.getCurrentUserId(activation);"
],
"header": "@@ -61,13 +61,12 @@ public class StatementSchemaPermission extends StatementPermission",
"removed": [
"\t\t\t\t\t String authid,",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/iapi/sql/dictionary/StatementTablePermission.java",
"hunks": [
{
"added": [],
"header": "@@ -110,7 +110,6 @@ public class StatementTablePermission extends StatementPermission",
"removed": [
"\t\t\t\t\t String authorizationId,"
]
},
{
"added": [
" if (!hasPermissionOnTable(lcc, activation, forGrant, ps)) {",
" throw StandardException.newException(",
" (forGrant ? SQLState.AUTH_NO_TABLE_PERMISSION_FOR_GRANT",
" : SQLState.AUTH_NO_TABLE_PERMISSION),",
" lcc.getCurrentUserId(activation),",
" getPrivName(),",
" td.getSchemaName(),",
" td.getName());"
],
"header": "@@ -118,15 +117,15 @@ public class StatementTablePermission extends StatementPermission",
"removed": [
"\t\tif (!hasPermissionOnTable(lcc, activation,",
"\t\t\t\t\t\t\t\t\t authorizationId, forGrant, ps)) {",
"\t\t\tthrow StandardException.newException( forGrant ? SQLState.AUTH_NO_TABLE_PERMISSION_FOR_GRANT",
"\t\t\t\t\t\t\t\t\t\t\t\t : SQLState.AUTH_NO_TABLE_PERMISSION,",
"\t\t\t\t\t\t\t\t\t\t\t\t authorizationId,",
"\t\t\t\t\t\t\t\t\t\t\t\t getPrivName(),",
"\t\t\t\t\t\t\t\t\t\t\t\t td.getSchemaName(),",
"\t\t\t\t\t\t\t\t\t\t\t\t td.getName());"
]
},
{
"added": [
" String currentUserId = lcc.getCurrentUserId(activation);",
" currentUserId,"
],
"header": "@@ -145,26 +144,25 @@ public class StatementTablePermission extends StatementPermission",
"removed": [
"\t * @param authorizationId the id of the current user",
"\t\t\t\t\t\t\t\t\t\t String authorizationId,",
"\t\t\t\t\t\t\t\t\t\tauthorizationId,"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/jdbc/EmbedConnection.java",
"hunks": [
{
"added": [
" String username = lcc.getSessionUserId();"
],
"header": "@@ -1243,7 +1243,7 @@ public abstract class EmbedConnection implements EngineConnection",
"removed": [
"\t\t\tString username = lcc.getAuthorizationId();"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/catalog/DataDictionaryImpl.java",
"hunks": [
{
"added": [
" oldRai.hasDefinersRights(),"
],
"header": "@@ -2509,6 +2509,7 @@ public final class\tDataDictionaryImpl",
"removed": []
},
{
"added": [
" false, /* hasDefinersRights */"
],
"header": "@@ -7022,6 +7023,7 @@ public final class\tDataDictionaryImpl",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/CreateAliasNode.java",
"hunks": [
{
"added": [
" public static final int ROUTINE_SECURITY_DEFINER = RETURN_TYPE + 1;",
"",
" // Keep ROUTINE_ELEMENT_COUNT last (determines set cardinality).",
" // Note: Remember to also update the map ROUTINE_CLAUSE_NAMES in",
" // sqlgrammar.jj when elements are added.",
" public static final int ROUTINE_ELEMENT_COUNT =",
" ROUTINE_SECURITY_DEFINER + 1;",
""
],
"header": "@@ -64,8 +64,14 @@ public class CreateAliasNode extends DDLStatementNode",
"removed": [
" public static final int ROUTINE_ELEMENT_COUNT = RETURN_TYPE + 1;",
" "
]
},
{
"added": [
" Boolean definersRightsO =",
" (Boolean) routineElements[ROUTINE_SECURITY_DEFINER];",
" boolean definersRights =",
" (definersRightsO == null) ? false :",
" definersRightsO.booleanValue();",
""
],
"header": "@@ -198,6 +204,12 @@ public class CreateAliasNode extends DDLStatementNode",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/DropSchemaNode.java",
"hunks": [
{
"added": [
"import org.apache.derby.iapi.sql.conn.StatementContext;"
],
"header": "@@ -24,6 +24,7 @@ package\torg.apache.derby.impl.sql.compile;",
"removed": []
},
{
"added": [
" StatementContext stx = lcc.getStatementContext();",
" String currentUser = stx.getSQLSessionContext().getCurrentUser();"
],
"header": "@@ -61,7 +62,8 @@ public class DropSchemaNode extends DDLStatementNode",
"removed": [
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/SpecialFunctionNode.java",
"hunks": [
{
"added": [
" methodName = \"getCurrentUserId\";"
],
"header": "@@ -124,16 +124,14 @@ public class SpecialFunctionNode extends ValueNode",
"removed": [
"\t\tcase C_NodeTypes.SESSION_USER_NODE:",
"\t\t\t\tcase C_NodeTypes.SESSION_USER_NODE: sqlName = \"SESSION_USER\"; break;",
"\t\t\tmethodName = \"getAuthorizationId\";"
]
},
{
"added": [
" case C_NodeTypes.SESSION_USER_NODE:",
" methodName = \"getSessionUserId\";",
" methodType = \"java.lang.String\";",
" sqlName = \"SESSION_USER\";",
" dtd = DataDictionary.TYPE_SYSTEM_IDENTIFIER;",
" break;",
""
],
"header": "@@ -144,6 +142,13 @@ public class SpecialFunctionNode extends ValueNode",
"removed": []
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/compile/StaticMethodCallNode.java",
"hunks": [
{
"added": [
" /**",
" * Authorization id of user owning schema in which routine is defined.",
" */",
" private String routineDefiner = null;"
],
"header": "@@ -129,6 +129,10 @@ public class StaticMethodCallNode extends MethodCallNode",
"removed": []
},
{
"added": [
" routineDefiner = sd.getAuthorizationId();",
""
],
"header": "@@ -646,6 +650,8 @@ public class StaticMethodCallNode extends MethodCallNode",
"removed": []
},
{
"added": [
" private void generateSetupNestedSessionContext(",
" ActivationClassBuilder acb,",
" MethodBuilder mb,",
" boolean hadDefinersRights,",
" String definer) throws StandardException {"
],
"header": "@@ -662,8 +668,11 @@ public class StaticMethodCallNode extends MethodCallNode",
"removed": [
"\tprivate void generateSetupNestedSessionContext(ActivationClassBuilder acb,",
"\t\t\t\t\t\t\t\t\t\t\t\t MethodBuilder mb) {"
]
},
{
"added": [
" mb.push(hadDefinersRights);",
" mb.push(definer);",
" \"void\", 3);"
],
"header": "@@ -674,9 +683,11 @@ public class StaticMethodCallNode extends MethodCallNode",
"removed": [
"\t\t\t\t\t \"void\", 1);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/conn/GenericAuthorizer.java",
"hunks": [
{
"added": [
" GenericAuthorizer(LanguageConnectionContext lcc)"
],
"header": "@@ -61,14 +61,10 @@ implements Authorizer",
"removed": [
"\tprivate final String authorizationId; //the userName after parsing by IdUtil ",
"\t",
"\tGenericAuthorizer(String authorizationId, ",
"\t\t\t\t\t\t LanguageConnectionContext lcc)",
"\t\tthis.authorizationId = authorizationId;"
]
},
{
"added": [
" !lcc.getCurrentUserId(activation).equals(",
" dd.getAuthorizationDatabaseOwner()))"
],
"header": "@@ -148,7 +144,8 @@ implements Authorizer",
"removed": [
"\t\t\t\t!authorizationId.equals(dd.getAuthorizationDatabaseOwner()))"
]
},
{
"added": [
" (lcc, false, activation);"
],
"header": "@@ -184,7 +181,7 @@ implements Authorizer",
"removed": [
"\t\t\t\t\t\t\t\t(lcc, authorizationId, false, activation);"
]
},
{
"added": [],
"header": "@@ -229,14 +226,6 @@ implements Authorizer",
"removed": [
"\t/**",
"\t @see Authorizer#getAuthorizationId",
"\t */",
"\tpublic String getAuthorizationId()",
"\t{",
"\t\treturn authorizationId;",
"\t}",
""
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/conn/GenericLanguageConnectionContext.java",
"hunks": [
{
"added": [
" /**",
" * In contrast to current user id, which may change (inside a routine",
" * executing with definer's rights), the sessionUser is constant in a",
" * session.",
" */",
" private String sessionUser = null;",
"",
" sessionUser = IdUtil.getUserAuthorizationId(userName);",
" authorizer = new GenericAuthorizer(this);"
],
"header": "@@ -349,11 +349,19 @@ public class GenericLanguageConnectionContext",
"removed": [
"\t\tauthorizer = new GenericAuthorizer(IdUtil.getUserAuthorizationId(userName),this);"
]
},
{
"added": [
" if (getSessionUserId() == null)"
],
"header": "@@ -362,7 +370,7 @@ public class GenericLanguageConnectionContext",
"removed": [
"\t\t\tif (getAuthorizationId() == null)"
]
},
{
"added": [
" String authorizationId = getSessionUserId();",
" getSessionUserId(), getTransactionCompile(), false);",
" dd,",
" getSessionUserId(),",
" getSessionUserId(),",
" (UUID) null,",
" false);"
],
"header": "@@ -393,14 +401,18 @@ public class GenericLanguageConnectionContext",
"removed": [
"\t\t\tString authorizationId = getAuthorizationId();",
"\t\t\t\t\tauthorizationId, getTransactionCompile(), false);",
"\t\t\t\t\tdd, authorizationId, authorizationId, (UUID) null, false);"
]
},
{
"added": [
"",
" // Reset the current user",
" getCurrentSQLSessionContext().setUser(getSessionUserId());"
],
"header": "@@ -639,6 +651,9 @@ public class GenericLanguageConnectionContext",
"removed": []
},
{
"added": [
" /**",
" * Get the session user",
" *",
" * @return String the authorization id of the session user.",
" */",
" public String getSessionUserId()",
" {",
" return sessionUser;",
" }"
],
"header": "@@ -1872,15 +1887,15 @@ public class GenericLanguageConnectionContext",
"removed": [
"\t/**",
"\t *\tGet the Authorization Id (user)",
"\t *",
"\t * @return String\tthe authorization id",
"\t */",
"\tpublic String getAuthorizationId()",
"\t{ ",
"\t\treturn authorizer.getAuthorizationId();",
"\t}"
]
},
{
"added": [
" /**",
" * @see LanguageConnectionContext#getCurrentUserId(Activation a)",
" */",
" public String getCurrentUserId(Activation a) {",
" return getCurrentSQLSessionContext(a).getCurrentUser();",
" }",
"",
""
],
"header": "@@ -3324,6 +3339,14 @@ public class GenericLanguageConnectionContext",
"removed": []
},
{
"added": [
" if (!roleIsSettable(a, role)) {"
],
"header": "@@ -3336,7 +3359,7 @@ public class GenericLanguageConnectionContext",
"removed": [
"\t\t\t\tif (!roleIsSettable(role)) {"
]
},
{
"added": [
" public boolean roleIsSettable(Activation a, String role)",
" throws StandardException {",
"",
" DataDictionary dd = getDataDictionary();",
" String currentUser = getCurrentUserId(a);",
" if (currentUser.equals(dbo)) {",
" (role, currentUser, dbo);"
],
"header": "@@ -3357,17 +3380,20 @@ public class GenericLanguageConnectionContext",
"removed": [
"\tpublic boolean roleIsSettable(String role) throws StandardException {",
"\t\tDataDictionary dd = getDataDictionary();",
"\t\tif (getAuthorizationId().equals(dbo)) {",
"\t\t\t\t(role, getAuthorizationId(), dbo);"
]
},
{
"added": [
" public void setupNestedSessionContext(",
" Activation a,",
" boolean definersRights,",
" String definer) throws StandardException {",
"",
" setupSessionContextMinion(a, true, definersRights, definer);",
" private void setupSessionContextMinion(",
" Activation a,",
" boolean push,",
" boolean definersRights,",
" String definer) throws StandardException {",
"",
" if (SanityManager.DEBUG) {",
" if (definersRights) {",
" SanityManager.ASSERT(push);",
" }",
" }",
"",
" SQLSessionContext sc = a.setupSQLSessionContextForChildren(push);",
"",
" if (definersRights) {",
" sc.setUser(definer);",
" } else {",
" // A priori: invoker's rights: Current user",
" sc.setUser(getCurrentUserId(a));",
" }",
" if (definersRights) {",
" // No role a priori. Cf. SQL 2008, section 10.4 <routine",
" // invocation>, GR 5 j) i) 1) B) \"If the external security",
" // characteristic of R is DEFINER, then the top cell of the",
" // authorization stack of RSC is set to contain only the routine",
" // authorization identifier of R.",
"",
" sc.setRole(null);",
" } else {",
" // Semantics for roles dictate (SQL 4.34.1.1 and 4.27.3.) that the",
" // role is initially inherited from the current session context",
" // when we run with INVOKER security characteristic.",
" sc.setRole(getCurrentRoleId(a));",
" }",
"",
"",
" if (definersRights) {",
" SchemaDescriptor sd = getDataDictionary().getSchemaDescriptor(",
" definer,",
" getTransactionExecute(),",
" false);",
"",
" if (sd == null) {",
" sd = new SchemaDescriptor(",
" getDataDictionary(), definer, definer, (UUID) null, false);",
" }",
"",
" sc.setDefaultSchema(sd);",
" } else {",
" // Inherit current default schema. The initial value of the",
" // default schema is implementation defined. In Derby we",
" // inherit it when we invoke stored procedures and functions.",
" sc.setDefaultSchema(getDefaultSchema(a));",
" }"
],
"header": "@@ -3430,25 +3456,70 @@ public class GenericLanguageConnectionContext",
"removed": [
"\tpublic void setupNestedSessionContext(Activation a) {",
"\t\tsetupSessionContextMinion(a, true);",
"\tprivate void setupSessionContextMinion(Activation a,",
"\t\t\t\t\t\t\t\t\t\t\t\t boolean push) {",
"\t\tSQLSessionContext sc = a.setupSQLSessionContextForChildren(push);",
"\t\t// Semantics for roles dictate (SQL 4.34.1.1 and 4.27.3.) that the",
"\t\t// role is initially inherited from the current session",
"\t\t// context. (Since we always run with INVOKER security",
"\t\t// characteristic. Derby can't yet run with DEFINER's rights).",
"\t\t//",
"\t\tsc.setRole(getCurrentRoleId(a));",
"\t\t// Inherit current default schema. The initial value of the",
"\t\t// default schema is implementation defined. In Derby we",
"\t\t// inherit it when we invoke stored procedures and functions.",
"\t\tsc.setDefaultSchema(getDefaultSchema(a));"
]
},
{
"added": [
" public void setupSubStatementSessionContext(Activation a)",
" throws StandardException {",
"",
" setupSessionContextMinion(a, false, false, null);"
],
"header": "@@ -3472,8 +3543,10 @@ public class GenericLanguageConnectionContext",
"removed": [
"\tpublic void setupSubStatementSessionContext(Activation a) {",
"\t\tsetupSessionContextMinion(a, false);"
]
},
{
"added": [
" getInitialDefaultSchemaDescriptor(),",
" getSessionUserId());"
],
"header": "@@ -3483,7 +3556,8 @@ public class GenericLanguageConnectionContext",
"removed": [
"\t\t\t\tgetInitialDefaultSchemaDescriptor());"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/conn/SQLSessionContextImpl.java",
"hunks": [
{
"added": [
" private String currentUser;",
" public SQLSessionContextImpl (SchemaDescriptor sd, String currentUser) {",
" this.currentUser = currentUser;"
],
"header": "@@ -27,12 +27,14 @@ import org.apache.derby.iapi.sql.dictionary.SchemaDescriptor;",
"removed": [
" public SQLSessionContextImpl (SchemaDescriptor sd) {"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/DDLConstantAction.java",
"hunks": [
{
"added": [
" String currentUser = lcc.getCurrentUserId(activation);",
" if (! currentUser.equals( dd.getAuthorizationDatabaseOwner()) )"
],
"header": "@@ -322,13 +322,13 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\tString authId = lcc.getAuthorizationId();",
"\t\tif (!(lcc.getAuthorizationId().equals(dd.getAuthorizationDatabaseOwner())))"
]
},
{
"added": [
" permDesc = statPerm.getPermissionDescriptor(",
" currentUser, dd);",
""
],
"header": "@@ -391,7 +391,9 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\tpermDesc = statPerm.getPermissionDescriptor(lcc.getAuthorizationId(), dd);\t\t\t\t"
]
},
{
"added": [
" ! ((StatementColumnPermission)statPerm).",
" allColumnsCoveredByUserOrPUBLIC(",
" currentUser, dd))) {"
],
"header": "@@ -407,9 +409,9 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\t\t\t\t\t !(((StatementColumnPermission)statPerm).",
"\t\t\t\t\t\t\t\t\t\t allColumnsCoveredByUserOrPUBLIC",
"\t\t\t\t\t\t\t\t\t\t (lcc.getAuthorizationId(), dd)))) {"
]
},
{
"added": [
" if (! permDesc.checkOwner(currentUser) ) {",
"",
" dm.addDependency(dependent, permDesc,"
],
"header": "@@ -417,8 +419,9 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\t\tif (!(permDesc.checkOwner(lcc.getAuthorizationId()))) {",
"\t\t\t\t\t\t\tdm.addDependency(dependent, permDesc,"
]
},
{
"added": [
" if (! permDesc.checkOwner(currentUser))"
],
"header": "@@ -432,7 +435,7 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\tif (!(permDesc.checkOwner(lcc.getAuthorizationId())))"
]
},
{
"added": [
" getPUBLIClevelColPermsDescriptor(",
" currentUser, dd);"
],
"header": "@@ -448,8 +451,8 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\t\t\t\tgetPUBLIClevelColPermsDescriptor",
"\t\t\t\t\t\t\t\t (lcc.getAuthorizationId(), dd);"
]
},
{
"added": [
" allColumnsCoveredByUserOrPUBLIC(",
" currentUser, dd)) {"
],
"header": "@@ -465,8 +468,8 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\t\t\t\t\tallColumnsCoveredByUserOrPUBLIC",
"\t\t\t\t\t\t\t\t\t (lcc.getAuthorizationId(), dd)) {"
]
},
{
"added": [
" String currentUser = lcc.getCurrentUserId(activation);"
],
"header": "@@ -510,7 +513,7 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\tString authId = lcc.getAuthorizationId();"
]
},
{
"added": [
" dd.getRoleGrantDescriptor(role, currentUser, dbo);"
],
"header": "@@ -521,7 +524,7 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\tdd.getRoleGrantDescriptor(role, authId, dbo);"
]
},
{
"added": [
" String currentUser = lcc.getCurrentUserId(activation);",
" if (! currentUser.equals(dbo))"
],
"header": "@@ -638,13 +641,13 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\tString authId = lcc.getAuthorizationId();",
"\t\tif (!authId.equals(dbo))"
]
},
{
"added": [
" permDesc = statPerm.",
" getPermissionDescriptor(currentUser, dd);"
],
"header": "@@ -674,7 +677,8 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\tpermDesc = statPerm.getPermissionDescriptor(lcc.getAuthorizationId(), dd);\t\t\t\t"
]
},
{
"added": [
" ! ((StatementColumnPermission)statPerm).",
" allColumnsCoveredByUserOrPUBLIC(",
" currentUser, dd)) ) {"
],
"header": "@@ -688,9 +692,9 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\t\t\t\t\t !(((StatementColumnPermission)statPerm).",
"\t\t\t\t\t\t\t\t\t allColumnsCoveredByUserOrPUBLIC",
"\t\t\t\t\t\t\t\t\t\t (lcc.getAuthorizationId(), dd)))) {"
]
},
{
"added": [
" if (! permDesc.checkOwner(currentUser) ) {",
""
],
"header": "@@ -698,7 +702,8 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\t\tif (!(permDesc.checkOwner(lcc.getAuthorizationId()))) {"
]
},
{
"added": [
" if (! permDesc.checkOwner(currentUser) )"
],
"header": "@@ -713,7 +718,7 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\tif (!(permDesc.checkOwner(lcc.getAuthorizationId())))"
]
},
{
"added": [
" permDesc = statementColumnPermission.",
" getPUBLIClevelColPermsDescriptor(",
" currentUser, dd);"
],
"header": "@@ -738,7 +743,9 @@ abstract class DDLConstantAction implements ConstantAction",
"removed": [
"\t\t\t\t\t\t\tpermDesc = statementColumnPermission.getPUBLIClevelColPermsDescriptor(lcc.getAuthorizationId(), dd);"
]
}
]
},
{
"file": "java/engine/org/apache/derby/impl/sql/execute/SetRoleConstantAction.java",
"hunks": [
{
"added": [
" final String currentAuthId = lcc.getCurrentUserId(activation);"
],
"header": "@@ -101,7 +101,7 @@ class SetRoleConstantAction implements ConstantAction",
"removed": [
" final String currentAuthId = lcc.getAuthorizationId();"
]
}
]
},
{
"file": "java/testing/org/apache/derbyTesting/junit/JDBC.java",
"hunks": [
{
"added": [
"",
" /**",
" * Asserts that the current user is the same as the one specified.",
" *",
" * @param con connection to check schema in",
" * @param schema expected user name",
" * @throws SQLException if something goes wrong",
" */",
" public static void assertCurrentUser(Connection con, String user)",
" throws SQLException {",
" Statement stmt = con.createStatement();",
" try {",
" JDBC.assertSingleValueResultSet(",
" stmt.executeQuery(\"VALUES CURRENT_USER\"), user);",
" } finally {",
" stmt.close();",
" }",
" }",
""
],
"header": "@@ -1289,6 +1289,25 @@ public class JDBC {",
"removed": []
}
]
}
] |
derby-DERBY-4553-574d8e1f
|
DERBY-4553: In ij GETCURRENTROWNUMBER directly writes its result to output
Patch contributed by Sylvain Leroux <sylvain@chicoree.fr>
git-svn-id: https://svn.apache.org/repos/asf/db/derby/code/trunk@911952 13f79535-47bb-0310-9956-ffa450edef68
|
[
{
"file": "java/tools/org/apache/derby/impl/tools/ij/ijVectorResult.java",
"hunks": [
{
"added": [
" * This is an impl for a simple Vector of objects."
],
"header": "@@ -25,7 +25,7 @@ import java.util.Vector;",
"removed": [
" * This is an impl for a simple Vector of strings."
]
},
{
"added": [
"\t/**",
"\t * Initialize a new vector containing only one object.",
"\t */",
"\tijVectorResult(Object value, SQLWarning w) {",
"\t\tthis(new Vector(1), w);",
"\t\tvec.add(value);",
"\t}",
"",
"\t/**",
"\t * Initialize a new vector containing only one integer value.",
"\t */",
"\tijVectorResult(int value, SQLWarning w) {",
"\t\tthis(new Integer(value), w);",
"\t}",
"",
""
],
"header": "@@ -38,6 +38,22 @@ class ijVectorResult extends ijResultImpl {",
"removed": []
}
]
}
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.