index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/map/UpdateObserver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.map;
import java.util.Iterator;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.recipes.core.combine.ChangeObserver;
import org.apache.fluo.recipes.core.combine.CombineQueue;
/**
* A {@link CollisionFreeMap} calls this to allow additional processing to be done when key values
* are updated. See the project level documentation for more information.
*
* @since 1.0.0
* @deprecated since 1.1.0 use {@link ChangeObserver} and {@link CombineQueue}
*/
@Deprecated
public abstract class UpdateObserver<K, V> {

  /**
   * Called once before any updates are processed, giving implementations a chance to set up
   * state. The default implementation does nothing.
   *
   * @param mapId id of the collision free map this observer is attached to
   * @param observerContext Fluo observer context for reading configuration
   * @throws Exception if initialization fails
   */
  public void init(String mapId, org.apache.fluo.api.observer.Observer.Context observerContext)
      throws Exception {}

  /**
   * Called with the batch of key/value updates being applied, within the given transaction.
   */
  public abstract void updatingValues(TransactionBase tx, Iterator<Update<K, V>> updates);
}
| 6,000 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/map/CollisionFreeMapObserver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.map;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
/**
* This class is configured for use by CollisionFreeMap.configure(FluoConfiguration,
* CollisionFreeMap.Options) . This class should never have to be used directly.
*
* @since 1.0.0
* @deprecated since 1.1.0
*/
@Deprecated
public class CollisionFreeMapObserver extends org.apache.fluo.api.observer.AbstractObserver {

  // Raw type because the observer is constructed reflectively and the key/value
  // type parameters are only known from configuration at runtime.
  @SuppressWarnings("rawtypes")
  private CollisionFreeMap cfm;
  private String mapId;

  public CollisionFreeMapObserver() {}

  @Override
  public void init(Context context) throws Exception {
    // The map id is passed via per-observer configuration; the map itself is
    // reconstructed from the application configuration.
    this.mapId = context.getObserverConfiguration().getString("mapId");
    cfm = CollisionFreeMap.getInstance(mapId, context.getAppConfiguration());
    // Forward init to the user-supplied update observer so it can set up state.
    cfm.updateObserver.init(mapId, context);
  }

  @Override
  public void process(TransactionBase tx, Bytes row, Column col) throws Exception {
    // All processing is delegated to the CollisionFreeMap implementation.
    cfm.process(tx, row, col);
  }

  @Override
  public ObservedColumn getObservedColumn() {
    // TODO constants
    // Weak notification column derived from mapId; init() must have run before
    // this returns a meaningful column.
    return new ObservedColumn(new Column("fluoRecipes", "cfm:" + mapId), NotificationType.WEAK);
  }
}
| 6,001 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/ExportEntry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
// This class intentionally package private.
// Plain data carrier for one serialized export queue entry.
// This class intentionally has no behavior; fields are filled in by
// ExportBucket/ExportQueue code in this package.
class ExportEntry {
  // serialized export key bytes
  byte[] key;
  // sequence number of the entry (set from the transaction start timestamp when added)
  long seq;
  // serialized export value bytes
  byte[] value;
}
| 6,002 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/ExportBucket.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
import java.util.Iterator;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.api.client.scanner.CellScanner;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Bytes.BytesBuilder;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.api.data.RowColumn;
import org.apache.fluo.api.data.RowColumnValue;
import org.apache.fluo.api.data.Span;
import org.apache.fluo.recipes.core.types.StringEncoder;
import org.apache.fluo.recipes.core.types.TypeLayer;
import org.apache.fluo.recipes.core.types.TypedTransactionBase;
/**
* This class encapsulates a buckets serialization code.
*/
// This class intentionally package private.
// This class intentionally package private.
//
// Row layout used by a bucket (see generateBucketRow/add):
//   bucket row        : <qid>:<hexBucketId>
//   minimal row       : <qid>:<hexBucketId>:
//   export entry row  : <qid>:<hexBucketId>:<serialized key><8-byte big-endian seq>
// Export values are stored under EXPORT_COL; the resume pointer for partially
// processed buckets is stored under NEXT_COL on the minimal row.
class ExportBucket {
  private static final String NOTIFICATION_CF = "fluoRecipes";
  private static final String NOTIFICATION_CQ_PREFIX = "eq:";
  private static final Column EXPORT_COL = new Column("e", "v");
  private static final Column NEXT_COL = new Column("e", "next");

  /**
   * Builds the weak notification column for the export queue with the given id.
   */
  static Column newNotificationColumn(String queueId) {
    return new Column(NOTIFICATION_CF, NOTIFICATION_CQ_PREFIX + queueId);
  }

  private final TypedTransactionBase ttx;
  private final String qid;
  private final Bytes bucketRow;

  /**
   * Encodes a bucket number as zero-padded hex. The pad width is derived from maxBucket so that
   * all bucket ids in a queue have the same length and therefore sort correctly as strings.
   */
  static String genBucketId(int bucket, int maxBucket) {
    Preconditions.checkArgument(bucket >= 0);
    Preconditions.checkArgument(maxBucket > 0);
    int bits = 32 - Integer.numberOfLeadingZeros(maxBucket);
    int bucketLen = bits / 4 + (bits % 4 > 0 ? 1 : 0);
    return Strings.padStart(Integer.toHexString(bucket), bucketLen, '0');
  }

  /**
   * Builds the row prefix for a bucket: {@code <qid>:<hexBucketId>}.
   */
  static Bytes generateBucketRow(String qid, int bucket, int numBuckets) {
    return Bytes.of(qid + ":" + genBucketId(bucket, numBuckets));
  }

  ExportBucket(TransactionBase tx, String qid, int bucket, int numBuckets) {
    // TODO encode in a more robust way... but for now fail early
    // ':' is the separator used in the row layout, so it cannot appear in the queue id.
    Preconditions.checkArgument(!qid.contains(":"), "Export QID can not contain :");
    this.ttx = new TypeLayer(new StringEncoder()).wrap(tx);
    this.qid = qid;
    this.bucketRow = generateBucketRow(qid, bucket, numBuckets);
  }

  /**
   * Reconstructs a bucket from its "minimal row" (the bucket row with a trailing ':'), which is
   * the row weak notifications are set on. The trailing ':' is stripped and the queue id is
   * parsed back out of the row.
   */
  ExportBucket(TransactionBase tx, Bytes bucketRow) {
    this.ttx = new TypeLayer(new StringEncoder()).wrap(tx);
    int colonLoc = -1;
    for (int i = 0; i < bucketRow.length(); i++) {
      if (bucketRow.byteAt(i) == ':') {
        colonLoc = i;
        break;
      }
    }
    // NOTE(review): colonLoc can never equal bucketRow.length() given the loop above, so the
    // second half of this check is effectively always true.
    Preconditions.checkArgument(colonLoc != -1 && colonLoc != bucketRow.length(),
        "Invalid bucket row " + bucketRow);
    Preconditions.checkArgument(bucketRow.byteAt(bucketRow.length() - 1) == ':',
        "Invalid bucket row " + bucketRow);
    this.bucketRow = bucketRow.subSequence(0, bucketRow.length() - 1);
    this.qid = bucketRow.subSequence(0, colonLoc).toString();
  }

  /**
   * Appends the sequence number as 8 big-endian bytes, so rows with the same key sort by
   * sequence.
   */
  private static void encSeq(BytesBuilder bb, long l) {
    bb.append((byte) (l >>> 56));
    bb.append((byte) (l >>> 48));
    bb.append((byte) (l >>> 40));
    bb.append((byte) (l >>> 32));
    bb.append((byte) (l >>> 24));
    bb.append((byte) (l >>> 16));
    bb.append((byte) (l >>> 8));
    bb.append((byte) (l >>> 0));
  }

  /**
   * Inverse of {@link #encSeq}: decodes 8 big-endian bytes back into a long.
   */
  private static long decodeSeq(Bytes seq) {
    return (((long) seq.byteAt(0) << 56) + ((long) (seq.byteAt(1) & 255) << 48)
        + ((long) (seq.byteAt(2) & 255) << 40) + ((long) (seq.byteAt(3) & 255) << 32)
        + ((long) (seq.byteAt(4) & 255) << 24) + ((seq.byteAt(5) & 255) << 16)
        + ((seq.byteAt(6) & 255) << 8) + ((seq.byteAt(7) & 255) << 0));
  }

  /**
   * Writes one export entry: row is bucketRow + ':' + key + encoded seq, value goes in
   * EXPORT_COL.
   */
  public void add(long seq, byte[] key, byte[] value) {
    BytesBuilder builder = Bytes.builder(bucketRow.length() + 1 + key.length + 8).append(bucketRow)
        .append(':').append(key);
    encSeq(builder, seq);
    ttx.set(builder.toBytes(), EXPORT_COL, Bytes.of(value));
  }

  /**
   * Computes the minimal row for a bucket
   */
  private Bytes getMinimalRow() {
    return Bytes.builder(bucketRow.length() + 1).append(bucketRow).append(':').toBytes();
  }

  /**
   * Sets a weak notification on the minimal row so the export observer will process this bucket.
   */
  public void notifyExportObserver() {
    ttx.mutate().row(getMinimalRow()).col(newNotificationColumn(qid)).weaklyNotify();
  }

  /**
   * Returns an iterator over the bucket's export entries. If continueRow is non-null the scan
   * resumes (inclusively) from that row instead of the start of the bucket.
   */
  public Iterator<ExportEntry> getExportIterator(Bytes continueRow) {
    Span span;
    if (continueRow != null) {
      Span tmpSpan = Span.prefix(bucketRow);
      Span nextSpan = new Span(new RowColumn(continueRow, EXPORT_COL), true, tmpSpan.getEnd(),
          tmpSpan.isEndInclusive());
      span = nextSpan;
    } else {
      span = Span.prefix(bucketRow);
    }
    CellScanner scanner = ttx.scanner().over(span).fetch(EXPORT_COL).build();
    return new ExportIterator(scanner);
  }

  /**
   * Iterator that decodes scanned cells back into ExportEntry objects. remove() deletes the last
   * returned entry from the bucket.
   */
  private class ExportIterator implements Iterator<ExportEntry> {
    private Iterator<RowColumnValue> rowIter;
    // row of the entry most recently returned by next(); used by remove()
    private Bytes lastRow;

    public ExportIterator(CellScanner scanner) {
      this.rowIter = scanner.iterator();
    }

    @Override
    public boolean hasNext() {
      return rowIter.hasNext();
    }

    @Override
    public ExportEntry next() {
      RowColumnValue rowColVal = rowIter.next();
      Bytes row = rowColVal.getRow();
      // Split the row back into key and 8-byte sequence suffix (see add()).
      Bytes keyBytes = row.subSequence(bucketRow.length() + 1, row.length() - 8);
      Bytes seqBytes = row.subSequence(row.length() - 8, row.length());
      ExportEntry ee = new ExportEntry();
      ee.key = keyBytes.toArray();
      ee.seq = decodeSeq(seqBytes);
      // TODO maybe leave as Bytes?
      ee.value = rowColVal.getValue().toArray();
      lastRow = row;
      return ee;
    }

    @Override
    public void remove() {
      ttx.mutate().row(lastRow).col(EXPORT_COL).delete();
    }
  }

  /**
   * Reads the stored resume pointer for this bucket, or null when there is none.
   */
  public Bytes getContinueRow() {
    return ttx.get(getMinimalRow(), NEXT_COL);
  }

  /**
   * Stores a resume pointer: the full entry row (key + seq) where processing should pick up.
   */
  public void setContinueRow(ExportEntry ee) {
    BytesBuilder builder = Bytes.builder(bucketRow.length() + 1 + ee.key.length + 8)
        .append(bucketRow).append(':').append(ee.key);
    encSeq(builder, ee.seq);
    Bytes nextRow = builder.toBytes();
    ttx.set(getMinimalRow(), NEXT_COL, nextRow);
  }

  /**
   * Deletes the resume pointer once the bucket has been fully processed.
   */
  public void clearContinueRow() {
    ttx.delete(getMinimalRow(), NEXT_COL);
  }
}
| 6,003 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/MemLimitIterator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
import java.util.Iterator;
import java.util.NoSuchElementException;
// This class intentionally package private.
// This class intentionally package private.
/**
 * Wraps an ExportEntry iterator and stops producing entries once an approximate memory budget is
 * exhausted. The budget is charged the serialized key length + value length + a fixed per-key
 * overhead. Note the limit is checked before reading the next entry, so the last entry returned
 * may push consumption past the limit.
 */
class MemLimitIterator implements Iterator<ExportEntry> {

  // configuration is immutable after construction
  private final long memLimit;
  private final int extraPerKey;
  private final Iterator<ExportEntry> source;

  // running total of bytes charged for entries returned so far
  private long memConsumed = 0;

  /**
   * @param input underlying iterator to draw entries from
   * @param limit approximate byte budget; must be exceeded before iteration stops
   * @param extraPerKey fixed overhead charged per entry in addition to key/value lengths
   */
  public MemLimitIterator(Iterator<ExportEntry> input, long limit, int extraPerKey) {
    this.source = input;
    this.memLimit = limit;
    this.extraPerKey = extraPerKey;
  }

  @Override
  public boolean hasNext() {
    // Stop when the budget is spent, even if the source has more entries.
    return memConsumed < memLimit && source.hasNext();
  }

  @Override
  public ExportEntry next() {
    if (!hasNext()) {
      throw new NoSuchElementException();
    }
    ExportEntry ee = source.next();
    memConsumed += ee.key.length + extraPerKey + ee.value.length;
    return ee;
  }

  @Override
  public void remove() {
    // Delegates removal semantics entirely to the underlying iterator.
    source.remove();
  }
}
| 6,004 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/SequencedExport.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
/**
* @since 1.0.0
*/
/**
 * An {@link Export} paired with the sequence number it was stored under in the export queue.
 *
 * @since 1.0.0
 */
public class SequencedExport<K, V> extends Export<K, V> {
  // sequence number the entry was added to the queue with
  private final long seq;

  // Package private: instances are created by export queue internals when reading entries.
  SequencedExport(K k, V v, long seq) {
    super(k, v);
    this.seq = seq;
  }

  /**
   * Returns the sequence number assigned when this entry was added to the export queue.
   */
  public long getSequence() {
    return seq;
  }
}
| 6,005 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/ExportObserver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.recipes.core.serialization.SimpleSerializer;
/**
* @since 1.0.0
* @deprecated since 1.1.0
*/
@Deprecated
public class ExportObserver<K, V> extends org.apache.fluo.api.observer.AbstractObserver {

  // Delegate that does the actual export processing.
  private ExportObserverImpl<K, V> eoi;
  private String queueId;

  protected String getQueueId() {
    return queueId;
  }

  @SuppressWarnings("unchecked")
  @Override
  public void init(Context context) throws Exception {
    // The queue id is passed through per-observer configuration; everything else
    // is reconstructed from the application configuration.
    queueId = context.getObserverConfiguration().getString("queueId");
    ExportQueue.Options opts = new ExportQueue.Options(queueId, context.getAppConfiguration());
    // TODO defer loading classes... so that not done during fluo init
    // TODO move class loading to centralized place... also attempt to check type params
    @SuppressWarnings("rawtypes")
    Exporter exporter = getClass().getClassLoader().loadClass(opts.fluentCfg.exporterType)
        .asSubclass(Exporter.class).newInstance();
    SimpleSerializer serializer = SimpleSerializer.getInstance(context.getAppConfiguration());
    // Give the user's exporter a context exposing the queue id, its configuration,
    // and the underlying observer context.
    exporter.init(new Exporter.Context() {
      @Override
      public String getQueueId() {
        return queueId;
      }

      @Override
      public SimpleConfiguration getExporterConfiguration() {
        return opts.getExporterConfiguration();
      }

      @Override
      public Context getObserverContext() {
        return context;
      }
    });
    this.eoi =
        new ExportObserverImpl<K, V>(queueId, opts.fluentCfg, serializer, exporter::processExports);
  }

  @Override
  public ObservedColumn getObservedColumn() {
    // Weak notification column derived from queueId; init() must have run first.
    return new ObservedColumn(ExportBucket.newNotificationColumn(queueId), NotificationType.WEAK);
  }

  @Override
  public void process(TransactionBase tx, Bytes row, Column col) throws Exception {
    // All processing is delegated to the implementation built in init().
    eoi.process(tx, row, col);
  }
}
| 6,006 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/ExportObserverImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
import java.util.Iterator;
import com.google.common.collect.Iterators;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.api.observer.Observer;
import org.apache.fluo.recipes.core.serialization.SimpleSerializer;
// This class intentionally package private.
// This class intentionally package private.
/**
 * Core export-processing logic shared by the deprecated ExportObserver and newer registration
 * paths. Each process() call drains up to a memory-limited batch of entries from one bucket,
 * hands them to the user's exporter, and maintains the bucket's resume pointer.
 */
class ExportObserverImpl<K, V> implements Observer {
  private String queueId;
  private Class<K> keyType;
  private Class<V> valType;
  SimpleSerializer serializer;
  private org.apache.fluo.recipes.core.export.function.Exporter<K, V> exporter;
  // byte budget per process() call, from the configured buffer size
  private long memLimit;

  @SuppressWarnings("unchecked")
  ExportObserverImpl(String queueId, FluentConfigurator opts, SimpleSerializer serializer,
      org.apache.fluo.recipes.core.export.function.Exporter<K, V> exportConsumer) throws Exception {
    this.queueId = queueId;
    // TODO move class loading to centralized place... also attempt to check type params
    keyType = (Class<K>) getClass().getClassLoader().loadClass(opts.keyType);
    valType = (Class<V>) getClass().getClassLoader().loadClass(opts.valueType);
    exporter = exportConsumer;
    this.serializer = serializer;
    memLimit = opts.getBufferSize();
  }

  @Override
  public void process(TransactionBase tx, Bytes row, Column col) throws Exception {
    // row is the bucket's minimal row (where the weak notification was set).
    ExportBucket bucket = new ExportBucket(tx, row);
    // Resume from a previously stored pointer if the bucket was only partially processed.
    Bytes continueRow = bucket.getContinueRow();
    Iterator<ExportEntry> input = bucket.getExportIterator(continueRow);
    // Cap how much serialized data is read; 8 + queueId.length() approximates per-entry row
    // overhead (seq bytes plus queue id prefix).
    MemLimitIterator memLimitIter = new MemLimitIterator(input, memLimit, 8 + queueId.length());
    Iterator<SequencedExport<K, V>> exportIterator = Iterators.transform(memLimitIter,
        ee -> new SequencedExport<>(serializer.deserialize(ee.key, keyType),
            serializer.deserialize(ee.value, valType), ee.seq));
    // consumingIterator removes each entry from the bucket as it is read, so anything the
    // exporter consumes is deleted in this transaction.
    exportIterator = Iterators.consumingIterator(exportIterator);
    exporter.export(exportIterator);
    if (input.hasNext() || continueRow != null) {
      // not everything was processed so notify self OR new data may have been inserted above the
      // continue row
      bucket.notifyExportObserver();
    }
    if (input.hasNext()) {
      if (!memLimitIter.hasNext()) {
        // stopped because of mem limit... set continue key
        bucket.setContinueRow(input.next());
        // Null out so the newly stored pointer is not cleared below.
        continueRow = null;
      }
    }
    if (continueRow != null) {
      // Everything from the old pointer to the end of the bucket was processed.
      bucket.clearContinueRow();
    }
  }
}
| 6,007 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/Export.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
import java.util.Objects;
/**
* @since 1.0.0
*/
/**
 * An immutable key/value pair destined for an export queue. Both key and value must be non-null.
 *
 * @since 1.0.0
 */
public class Export<K, V> {
  private final K key;
  private final V value;

  /**
   * @param key export key, must be non-null
   * @param val export value, must be non-null
   * @throws NullPointerException if either argument is null
   */
  public Export(K key, V val) {
    this.key = Objects.requireNonNull(key);
    this.value = Objects.requireNonNull(val);
  }

  /** Returns the key supplied at construction. */
  public K getKey() {
    return key;
  }

  /** Returns the value supplied at construction. */
  public V getValue() {
    return value;
  }
}
| 6,008 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/FluentConfigurator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
import java.util.Objects;
import com.google.common.base.Preconditions;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.recipes.core.common.RowRange;
import org.apache.fluo.recipes.core.common.TableOptimizations;
import org.apache.fluo.recipes.core.common.TransientRegistry;
import org.apache.fluo.recipes.core.export.ExportQueue.FluentArg1;
import org.apache.fluo.recipes.core.export.ExportQueue.FluentArg2;
import org.apache.fluo.recipes.core.export.ExportQueue.FluentArg3;
import org.apache.fluo.recipes.core.export.ExportQueue.FluentOptions;
import org.apache.fluo.recipes.core.export.ExportQueue.Optimizer;
// This class intentionally package private.
// This class intentionally package private.
/**
 * Single mutable builder backing ExportQueue's fluent configuration API. It implements every
 * stage interface (FluentArg1..3, FluentOptions) and accumulates settings, then persists them
 * under {@code recipes.exportQueue.<queueId>.*} application properties in save().
 */
class FluentConfigurator implements FluentArg1, FluentArg2, FluentArg3, FluentOptions {
  // 1 MiB default read buffer
  static final long DEFAULT_BUFFER_SIZE = 1 << 20;
  static final int DEFAULT_BUCKETS_PER_TABLET = 10;
  // property key prefix; full keys are PREFIX + queueId + ".<setting>"
  static final String PREFIX = "recipes.exportQueue.";

  String queueId;
  // null means "not set" so save() can omit the property and getters fall back to defaults
  Long bufferSize = null;
  int buckets;
  String valueType;
  Integer bucketsPerTablet = null;
  String keyType;
  String exporterType;

  FluentConfigurator(String queueId) {
    this.queueId = queueId;
  }

  @Override
  public FluentOptions bufferSize(long bufferSize) {
    Preconditions.checkArgument(bufferSize > 0, "Buffer size must be positive");
    this.bufferSize = bufferSize;
    return this;
  }

  @Override
  public FluentOptions bucketsPerTablet(int bucketsPerTablet) {
    Preconditions.checkArgument(bucketsPerTablet > 0,
        "bucketsPerTablet is <= 0 : " + bucketsPerTablet);
    this.bucketsPerTablet = bucketsPerTablet;
    return this;
  }

  /**
   * Persists this configuration as application properties, registers the queue's row range as
   * transient, and registers the table optimizer for the queue.
   */
  void save(SimpleConfiguration appConfig) {
    appConfig.setProperty(PREFIX + queueId + ".buckets", buckets + "");
    appConfig.setProperty(PREFIX + queueId + ".key", keyType);
    appConfig.setProperty(PREFIX + queueId + ".val", valueType);
    // Optional settings are only written when explicitly set; load() supplies defaults.
    if (exporterType != null) {
      appConfig.setProperty(PREFIX + queueId + ".exporter", exporterType);
    }
    if (bufferSize != null) {
      appConfig.setProperty(PREFIX + queueId + ".bufferSize", bufferSize);
    }
    if (bucketsPerTablet != null) {
      appConfig.setProperty(PREFIX + queueId + ".bucketsPerTablet", bucketsPerTablet);
    }
    // Mark the queue's whole row range as transient data that can be periodically compacted away.
    Bytes exportRangeStart = Bytes.of(queueId + ExportQueue.RANGE_BEGIN);
    Bytes exportRangeStop = Bytes.of(queueId + ExportQueue.RANGE_END);
    new TransientRegistry(appConfig).addTransientRange("exportQueue." + queueId,
        new RowRange(exportRangeStart, exportRangeStop));
    TableOptimizations.registerOptimization(appConfig, queueId, Optimizer.class);
  }

  @Override
  public void save(FluoConfiguration fluoConfig) {
    save(fluoConfig.getAppConfiguration());
  }

  /**
   * Reads a previously saved configuration back out of the application properties, applying
   * defaults for any optional settings that were not stored.
   */
  static FluentConfigurator load(String queueId, SimpleConfiguration appConfig) {
    FluentConfigurator fc = new FluentConfigurator(queueId);
    fc.buckets = appConfig.getInt(PREFIX + queueId + ".buckets");
    fc.keyType = appConfig.getString(PREFIX + queueId + ".key");
    fc.valueType = appConfig.getString(PREFIX + queueId + ".val");
    fc.bufferSize = appConfig.getLong(PREFIX + queueId + ".bufferSize", DEFAULT_BUFFER_SIZE);
    fc.bucketsPerTablet =
        appConfig.getInt(PREFIX + queueId + ".bucketsPerTablet", DEFAULT_BUCKETS_PER_TABLET);
    fc.exporterType = appConfig.getString(PREFIX + queueId + ".exporter", null);
    return fc;
  }

  /** Returns the configured buffer size, or the default when unset. */
  long getBufferSize() {
    if (bufferSize == null) {
      return DEFAULT_BUFFER_SIZE;
    }
    return bufferSize;
  }

  /** Returns the configured buckets per tablet, or the default when unset. */
  int getBucketsPerTablet() {
    if (bucketsPerTablet == null) {
      return DEFAULT_BUCKETS_PER_TABLET;
    }
    return bucketsPerTablet;
  }

  @Override
  public FluentOptions buckets(int numBuckets) {
    Preconditions.checkArgument(numBuckets > 0);
    this.buckets = numBuckets;
    return this;
  }

  @Override
  public FluentArg3 valueType(String valueType) {
    this.valueType = Objects.requireNonNull(valueType);
    return this;
  }

  @Override
  public FluentArg3 valueType(Class<?> valueType) {
    this.valueType = valueType.getName();
    return this;
  }

  @Override
  public FluentArg2 keyType(String keyType) {
    this.keyType = Objects.requireNonNull(keyType);
    return this;
  }

  @Override
  public FluentArg2 keyType(Class<?> keyType) {
    this.keyType = keyType.getName();
    return this;
  }
}
| 6,009 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/ExportQueue.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;
import com.google.common.base.Preconditions;
import com.google.common.hash.Hashing;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.observer.Observer;
import org.apache.fluo.api.observer.Observer.NotificationType;
import org.apache.fluo.api.observer.ObserverProvider;
import org.apache.fluo.recipes.core.common.TableOptimizations;
import org.apache.fluo.recipes.core.common.TableOptimizations.TableOptimizationsFactory;
import org.apache.fluo.recipes.core.serialization.SimpleSerializer;
/**
* @since 1.0.0
*/
public class ExportQueue<K, V> {
static final String RANGE_BEGIN = "#";
static final String RANGE_END = ":~";
private int numBuckets;
private SimpleSerializer serializer;
private String queueId;
private FluentConfigurator opts;
// usage hint : could be created once in an observers init method
// usage hint : maybe have a queue for each type of data being exported???
// maybe less queues are
// more efficient though because more batching at export time??
  /**
   * Package-private constructor; instances are normally obtained via {@code getInstance}.
   *
   * @param opts loaded configuration for this queue (id, bucket count, types)
   * @param serializer serializer used for keys and values added to the queue
   */
  ExportQueue(FluentConfigurator opts, SimpleSerializer serializer) throws Exception {
    // TODO sanity check key type based on type params
    // TODO defer creating classes until needed.. so that its not done during Fluo init
    this.queueId = opts.queueId;
    this.numBuckets = opts.buckets;
    this.serializer = serializer;
    this.opts = opts;
  }
public void add(TransactionBase tx, K key, V value) {
addAll(tx, Collections.singleton(new Export<>(key, value)).iterator());
}
public void addAll(TransactionBase tx, Iterator<Export<K, V>> exports) {
Set<Integer> bucketsNotified = new HashSet<>();
while (exports.hasNext()) {
Export<K, V> export = exports.next();
byte[] k = serializer.serialize(export.getKey());
byte[] v = serializer.serialize(export.getValue());
int hash = Hashing.murmur3_32().hashBytes(k).asInt();
int bucketId = Math.abs(hash % numBuckets);
ExportBucket bucket = new ExportBucket(tx, queueId, bucketId, numBuckets);
bucket.add(tx.getStartTimestamp(), k, v);
if (!bucketsNotified.contains(bucketId)) {
bucket.notifyExportObserver();
bucketsNotified.add(bucketId);
}
}
}
// TODO maybe add for stream and iterable
public static <K2, V2> ExportQueue<K2, V2> getInstance(String exportQueueId,
SimpleConfiguration appConfig) {
FluentConfigurator opts = FluentConfigurator.load(exportQueueId, appConfig);
try {
return new ExportQueue<>(opts, SimpleSerializer.getInstance(appConfig));
} catch (Exception e) {
// TODO
throw new RuntimeException(e);
}
}
/**
* Part of a fluent API for configuring a export queue.
*
* @since 1.1.0
*/
  public static interface FluentArg1 {
    /** Sets the fully qualified class name of the queue's key type. */
    public FluentArg2 keyType(String keyType);

    /** Sets the queue's key type. */
    public FluentArg2 keyType(Class<?> keyType);
  }
/**
* Part of a fluent API for configuring a export queue.
*
* @since 1.1.0
*/
public static interface FluentArg2 {
public FluentArg3 valueType(String keyType);
public FluentArg3 valueType(Class<?> keyType);
}
/**
* Part of a fluent API for configuring a export queue.
*
* @since 1.1.0
*/
  public static interface FluentArg3 {
    /** Sets the number of buckets used to partition the queue; must be greater than zero. */
    FluentOptions buckets(int numBuckets);
  }
/**
 * Final step of the fluent API for configuring an export queue: optional settings and saving.
 *
 * @since 1.1.0
 */
public static interface FluentOptions {
  /**
   * Sets a limit on the amount of serialized updates to read into memory. Additional memory will
   * be used to actually deserialize and process the updates. This limit does not account for
   * object overhead in java, which can be significant.
   *
   * <p>
   * The way memory read is calculated is by summing the length of serialized key and value byte
   * arrays. Once this sum exceeds the configured memory limit, no more export key values are
   * processed in the current transaction. When not everything is processed, the observer
   * processing exports will notify itself causing another transaction to continue processing
   * later.
   */
  public FluentOptions bufferSize(long bufferSize);

  /**
   * Sets the number of buckets per tablet to generate. This affects how many split points will be
   * generated when optimizing the Accumulo table.
   */
  public FluentOptions bucketsPerTablet(int bucketsPerTablet);

  /**
   * Adds properties for this export queue to the Fluo application configuration. (The previous
   * javadoc said "CombineQueue" — a copy-paste from another recipe; this is an export queue.)
   */
  public void save(FluoConfiguration fluoConfig);
}
/**
 * A Fluent API for configuring an Export Queue. Use this method in conjunction with
 * {@link #registerObserver(ObserverProvider.Registry, org.apache.fluo.recipes.core.export.function.Exporter)}
 *
 * @param exportQueueId An id that uniquely identifies an export queue. This id is used in the
 *        keys in the Fluo table and in the keys in the Fluo application configuration.
 * @since 1.1.0
 */
public static FluentArg1 configure(String exportQueueId) {
  String id = Objects.requireNonNull(exportQueueId);
  return new FluentConfigurator(id);
}
/**
 * Call this method before initializing Fluo.
 *
 * @param fluoConfig The configuration that will be used to initialize fluo.
 * @deprecated since 1.1.0 use {@link #configure(String)} and
 *             {@link #registerObserver(ObserverProvider.Registry, org.apache.fluo.recipes.core.export.function.Exporter)}
 *             instead.
 */
@Deprecated
public static void configure(FluoConfiguration fluoConfig, Options opts) {
  // Persist the queue's settings into the application configuration first, then register the
  // legacy observer that processes this queue's buckets.
  SimpleConfiguration appCfg = fluoConfig.getAppConfiguration();
  opts.save(appCfg);

  String observerClass = ExportObserver.class.getName();
  fluoConfig.addObserver(new org.apache.fluo.api.config.ObserverSpecification(observerClass,
      Collections.singletonMap("queueId", opts.fluentCfg.queueId)));
}
/**
 * Generates the suggested Fluo table optimizations (splits and tablet grouping) for an export
 * queue.
 *
 * @since 1.0.0
 */
public static class Optimizer implements TableOptimizationsFactory {

  /**
   * Return suggested Fluo table optimizations for the specified export queue.
   *
   * @param appConfig Must pass in the application configuration obtained from
   *        {@code FluoClient.getAppConfiguration()} or
   *        {@code FluoConfiguration.getAppConfiguration()}
   */
  @Override
  public TableOptimizations getTableOptimizations(String queueId, SimpleConfiguration appConfig) {
    FluentConfigurator opts = FluentConfigurator.load(queueId, appConfig);

    // Always split at the queue's range boundary rows.
    List<Bytes> splits = new ArrayList<>();
    splits.add(Bytes.of(opts.queueId + RANGE_BEGIN));
    splits.add(Bytes.of(opts.queueId + RANGE_END));

    // Add one split for every group of bucketsPerTablet buckets.
    List<Bytes> bucketSplits = new ArrayList<>();
    int step = opts.getBucketsPerTablet();
    for (int bucket = step; bucket < opts.buckets; bucket += step) {
      bucketSplits.add(ExportBucket.generateBucketRow(opts.queueId, bucket, opts.buckets));
    }
    Collections.sort(bucketSplits);
    splits.addAll(bucketSplits);

    TableOptimizations tableOptim = new TableOptimizations();
    tableOptim.setSplits(splits);

    // the tablet with end row <queueId># does not contain any data for the export queue and
    // should not be grouped with the export queue
    tableOptim.setTabletGroupingRegex(Pattern.quote(queueId + ":"));

    return tableOptim;
  }
}
/**
 * Registers an observer that will export queued data. Use this method in conjunction with
 * {@link ExportQueue#configure(String)}.
 *
 * @param obsRegistry registry passed to {@code ObserverProvider.provide()}
 * @param exporter consumes batches of queued exports
 * @throws IllegalStateException if the queue was configured with the deprecated exporter-type
 *         mechanism, or if the export observer cannot be created
 * @since 1.1.0
 */
public void registerObserver(ObserverProvider.Registry obsRegistry,
    org.apache.fluo.recipes.core.export.function.Exporter<K, V> exporter) {
  // Message typos fixed ("not be set", "Cannot not use" double negative).
  Preconditions.checkState(opts.exporterType == null,
      "Expected exporter type to be unset, but it was set to %s. Cannot use the old and new "
          + "ways of configuring exporters at the same time.",
      opts.exporterType);
  Observer obs;
  try {
    obs = new ExportObserverImpl<>(queueId, opts, serializer, exporter);
  } catch (Exception e) {
    // Was a bare RuntimeException(e); include the queue id for diagnosability.
    throw new IllegalStateException("Failed to create export observer for queue " + queueId, e);
  }

  obsRegistry.forColumn(ExportBucket.newNotificationColumn(queueId), NotificationType.WEAK)
      .withId("exportq-" + queueId).useObserver(obs);
}
/**
 * Legacy configuration object for an export queue, retained for backwards compatibility. All
 * core settings are delegated to a {@link FluentConfigurator}.
 *
 * @since 1.0.0
 * @deprecated since 1.1.0 use {@link ExportQueue#configure(String)}
 */
@Deprecated
public static class Options {

  private static final String PREFIX = FluentConfigurator.PREFIX;

  // Holds the core queue settings (id, key/value types, buckets, buffer size, ...).
  FluentConfigurator fluentCfg;
  // Extra configuration passed through to the exporter; may be null until set.
  SimpleConfiguration exporterConfig;

  // Loads previously saved options back out of the application configuration.
  Options(String queueId, SimpleConfiguration appConfig) {
    fluentCfg = FluentConfigurator.load(queueId, appConfig);
    this.exporterConfig = appConfig.subset(PREFIX + queueId + ".exporterCfg");
  }

  public Options(String queueId, String exporterType, String keyType, String valueType,
      int buckets) {
    this(queueId, keyType, valueType, buckets);
    fluentCfg.exporterType = Objects.requireNonNull(exporterType);
  }

  public <K, V> Options(String queueId, Class<? extends Exporter<K, V>> exporter,
      Class<K> keyType, Class<V> valueType, int buckets) {
    this(queueId, exporter.getName(), keyType.getName(), valueType.getName(), buckets);
  }

  // intentionally package private
  Options(String queueId, String keyType, String valueType, int buckets) {
    Preconditions.checkArgument(buckets > 0);
    // The fluent chain always returns the same FluentConfigurator instance, hence the cast.
    this.fluentCfg = (FluentConfigurator) new FluentConfigurator(queueId).keyType(keyType)
        .valueType(valueType).buckets(buckets);
  }

  /**
   * Sets a limit on the amount of serialized updates to read into memory. Additional memory will
   * be used to actually deserialize and process the updates. This limit does not account for
   * object overhead in java, which can be significant.
   *
   * <p>
   * The way memory read is calculated is by summing the length of serialized key and value byte
   * arrays. Once this sum exceeds the configured memory limit, no more export key values are
   * processed in the current transaction. When not everything is processed, the observer
   * processing exports will notify itself causing another transaction to continue processing
   * later.
   */
  public Options setBufferSize(long bufferSize) {
    fluentCfg.bufferSize(bufferSize);
    return this;
  }

  long getBufferSize() {
    return fluentCfg.getBufferSize();
  }

  /**
   * Sets the number of buckets per tablet to generate. This affects how many split points will be
   * generated when optimizing the Accumulo table.
   *
   */
  public Options setBucketsPerTablet(int bucketsPerTablet) {
    fluentCfg.bucketsPerTablet(bucketsPerTablet);
    return this;
  }

  int getBucketsPerTablet() {
    return fluentCfg.getBucketsPerTablet();
  }

  public Options setExporterConfiguration(SimpleConfiguration config) {
    Objects.requireNonNull(config);
    this.exporterConfig = config;
    return this;
  }

  /** Returns the exporter configuration, or an empty configuration if none was set. */
  public SimpleConfiguration getExporterConfiguration() {
    if (exporterConfig == null) {
      return new SimpleConfiguration();
    }

    return exporterConfig;
  }

  public String getQueueId() {
    return fluentCfg.queueId;
  }

  // Persists the core settings, then each exporter config property under
  // "<PREFIX><queueId>.exporterCfg.<key>" (mirrors the subset() read in the loading ctor).
  void save(SimpleConfiguration appConfig) {
    fluentCfg.save(appConfig);

    if (exporterConfig != null) {
      Iterator<String> keys = exporterConfig.getKeys();
      while (keys.hasNext()) {
        String key = keys.next();
        appConfig.setProperty(PREFIX + fluentCfg.queueId + ".exporterCfg." + key,
            exporterConfig.getRawString(key));
      }
    }
  }
}
}
| 6,010 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/Exporter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export;
import java.util.Iterator;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.observer.Observer;
/**
 * Receives batches of exported key/values from an export queue for processing outside of Fluo
 * transactions.
 *
 * @since 1.0.0
 * @deprecated since 1.1.0 replaced by {@link org.apache.fluo.recipes.core.export.function.Exporter}
 */
@Deprecated
public abstract class Exporter<K, V> {

  /**
   * Gives an exporter access to its queue id, its own configuration, and the observer context.
   */
  public interface Context {
    String getQueueId();

    SimpleConfiguration getExporterConfiguration();

    Observer.Context getObserverContext();
  }

  // Called once before exports are processed; the default implementation is a no-op.
  public void init(Exporter.Context exporterContext) throws Exception {}

  /**
   * Must be able to handle same key being exported multiple times and key being exported out of
   * order. The sequence number is meant to help with this.
   *
   * <p>
   * If multiple export entries with the same key are passed in, then the entries with the same key
   * will be consecutive and in ascending sequence order.
   *
   * <p>
   * If the call to process exports is unexpectedly terminated, it will be called again later with
   * at least the same data. For example suppose an exporter was passed the following entries.
   *
   * <ul>
   * <li>key=0 sequence=9 value=abc
   * <li>key=1 sequence=13 value=d
   * <li>key=1 sequence=17 value=e
   * <li>key=1 sequence=23 value=f
   * <li>key=2 sequence=19 value=x
   * </ul>
   *
   * <p>
   * Assume the exporter exports some of these and then fails before completing all of them. The
   * next time its called it will be passed what it saw before, but it could also be passed more.
   *
   * <ul>
   * <li>key=0 sequence=9 value=abc
   * <li>key=1 sequence=13 value=d
   * <li>key=1 sequence=17 value=e
   * <li>key=1 sequence=23 value=f
   * <li>key=1 sequence=29 value=g
   * <li>key=2 sequence=19 value=x
   * <li>key=2 sequence=77 value=y
   * </ul>
   *
   */
  protected abstract void processExports(Iterator<SequencedExport<K, V>> exports);
}
| 6,011 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/export/function/Exporter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.export.function;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;

import org.apache.fluo.recipes.core.export.ExportQueue;
import org.apache.fluo.recipes.core.export.SequencedExport;
/**
* Must be able to handle same key being exported multiple times and keys being exported out of
* order. The sequence number is meant to help with this.
*
* <p>
* If multiple export entries with the same key are passed in, then the entries with the same key
* will be consecutive and in ascending sequence order.
*
* <p>
* If the call to process exports is unexpectedly terminated, it will be called again later with at
* least the same data. For example suppose an exporter was passed the following entries.
*
* <ul>
* <li>key=0 sequence=9 value=abc
* <li>key=1 sequence=13 value=d
* <li>key=1 sequence=17 value=e
* <li>key=1 sequence=23 value=f
* <li>key=2 sequence=19 value=x
* </ul>
*
* <p>
* Assume the exporter exports some of these and then fails before completing all of them. The next
* time its called it will be passed what it saw before, but it could also be passed more.
*
* <ul>
* <li>key=0 sequence=9 value=abc
* <li>key=1 sequence=13 value=d
* <li>key=1 sequence=17 value=e
* <li>key=1 sequence=23 value=f
* <li>key=1 sequence=29 value=g
* <li>key=2 sequence=19 value=x
* <li>key=2 sequence=77 value=y
* </ul>
*
* @since 1.1.0
* @see ExportQueue#registerObserver(org.apache.fluo.api.observer.ObserverProvider.Registry,
* Exporter)
*/
@FunctionalInterface
public interface Exporter<K, V> {

  /**
   * Performs this export operation.
   *
   * @param exports an iterator over the data to export
   */
  void export(Iterator<SequencedExport<K, V>> exports);

  /**
   * Returns a composed {@code Exporter} that exports, in sequence, to this then to {@code after}.
   * If performing either export throws an exception, it is relayed to the caller of the composed
   * operation. If performing this export operation throws an exception, the {@code after} export
   * will not be performed.
   *
   * <p>
   * The incoming data is buffered in memory so that both exporters observe the same entries; an
   * {@code Iterator} is single-use, so handing the same iterator to both exporters (as a naive
   * implementation would) leaves the second exporter with an exhausted, empty iterator.
   *
   * @param after the export operation to perform after this operation
   * @return a composed {@code Exporter} that performs in sequence this export operation followed by
   *         the {@code after} export operation
   * @throws NullPointerException if {@code after} is null
   */
  default Exporter<K, V> andThen(Exporter<K, V> after) {
    Objects.requireNonNull(after);
    return (Iterator<SequencedExport<K, V>> i) -> {
      // Buffer the batch so both exporters see all entries. Batch size is already bounded by
      // the queue's configured bufferSize before it reaches an exporter.
      List<SequencedExport<K, V>> buffered = new ArrayList<>();
      i.forEachRemaining(buffered::add);
      export(buffered.iterator());
      after.export(buffered.iterator());
    };
  }
}
| 6,012 |
0 | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core | Create_ds/fluo-recipes/modules/core/src/main/java/org/apache/fluo/recipes/core/data/RowHasher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.data;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.hash.Hashing;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Bytes.BytesBuilder;
import org.apache.fluo.recipes.core.common.TableOptimizations;
import org.apache.fluo.recipes.core.common.TableOptimizations.TableOptimizationsFactory;
/**
* This recipe provides code to help add a hash of the row as a prefix of the row. Using this recipe
* rows are structured like the following.
*
* <p>
* {@code <prefix>:<fixed len row hash>:<user row>}
*
* <p>
* The recipe also provides code the help generate split points and configure balancing of the
* prefix.
*
* <p>
* The project documentation has more information.
*
* <p>
* A single instance of RowHasher is thread safe. Creating a single static instance and using it can
* result in good performance.
*
* @since 1.0.0
*/
public class RowHasher {

  private static final int HASH_LEN = 4;
  // Length of the hash plus the ':' separator that follows it. Previously hard-coded as the
  // magic number 5 in two places, which would silently break if HASH_LEN changed.
  private static final int HASH_AND_SEP_LEN = HASH_LEN + 1;

  private static final String PREFIX = "recipes.rowHasher.";

  /**
   * Generates split points and a tablet grouping regex for the hashed row range.
   */
  public static class Optimizer implements TableOptimizationsFactory {

    @Override
    public TableOptimizations getTableOptimizations(String key, SimpleConfiguration appConfig) {
      int numTablets = appConfig.getInt(PREFIX + key + ".numTablets");

      String prefix = key + ":";

      List<Bytes> splits = new ArrayList<>(numTablets);

      // Evenly divide the base-36 hash space into numTablets ranges.
      int numSplits = numTablets - 1;
      int distance = (((int) Math.pow(Character.MAX_RADIX, HASH_LEN) - 1) / numTablets) + 1;
      int split = distance;
      for (int i = 0; i < numSplits; i++) {
        splits.add(Bytes.of(prefix
            + Strings.padStart(Integer.toString(split, Character.MAX_RADIX), HASH_LEN, '0')));
        split += distance;
      }

      // '~' sorts after every base-36 hash character, so this split bounds the prefix's data.
      splits.add(Bytes.of(prefix + "~"));

      TableOptimizations tableOptim = new TableOptimizations();
      tableOptim.setSplits(splits);
      // prefix is already a String; the redundant toString() call was removed.
      tableOptim.setTabletGroupingRegex(Pattern.quote(prefix));

      return tableOptim;
    }
  }

  /**
   * This method can be called to register table optimizations before initializing Fluo. This will
   * register {@link Optimizer} with
   * {@link TableOptimizations#registerOptimization(SimpleConfiguration, String, Class)}. See the
   * project level documentation for an example.
   *
   * @param fluoConfig The config that will be used to initialize Fluo
   * @param prefix The prefix used for your Row Hasher. If you have a single instance, could call
   *        {@link RowHasher#getPrefix()}.
   * @param numTablets Initial number of tablet to create.
   */
  public static void configure(FluoConfiguration fluoConfig, String prefix, int numTablets) {
    fluoConfig.getAppConfiguration().setProperty(PREFIX + prefix + ".numTablets", numTablets);
    TableOptimizations.registerOptimization(fluoConfig.getAppConfiguration(), prefix,
        Optimizer.class);
  }

  // Per-thread builder reuse keeps addHash() allocation-light and the class thread safe.
  private final ThreadLocal<BytesBuilder> builders;
  private final Bytes prefixBytes;
  private final String prefix;

  public RowHasher(String prefix) {
    this.prefix = prefix;
    this.prefixBytes = Bytes.of(prefix + ":");
    builders = ThreadLocal.withInitial(() -> {
      // 32 is just an initial guess at typical user-row length; the builder grows as needed.
      BytesBuilder bb = Bytes.builder(prefixBytes.length() + HASH_AND_SEP_LEN + 32);
      bb.append(prefixBytes);
      return bb;
    });
  }

  public String getPrefix() {
    return prefix;
  }

  /**
   * @return Returns input with prefix and hash of input prepended.
   */
  public Bytes addHash(String row) {
    return addHash(Bytes.of(row));
  }

  /**
   * @return Returns input with prefix and hash of input prepended.
   */
  public Bytes addHash(Bytes row) {
    BytesBuilder builder = builders.get();
    // Reset to just past the prefix that was appended when the builder was created.
    builder.setLength(prefixBytes.length());
    builder.append(genHash(row));
    builder.append(":");
    builder.append(row);
    return builder.toBytes();
  }

  // Checks that the bytes after the prefix look like "<base36 hash>:" (hash chars are lowercase
  // alphanumeric) and that the prefix itself ends with ':'.
  private boolean hasHash(Bytes row) {
    for (int i = prefixBytes.length(); i < prefixBytes.length() + HASH_LEN; i++) {
      byte b = row.byteAt(i);
      boolean isAlphaNum = (b >= 'a' && b <= 'z') || (b >= '0' && b <= '9');
      if (!isAlphaNum) {
        return false;
      }
    }

    if (row.byteAt(prefixBytes.length() - 1) != ':'
        || row.byteAt(prefixBytes.length() + HASH_LEN) != ':') {
      return false;
    }

    return true;
  }

  /**
   * @return Returns input with prefix and hash stripped from beginning.
   */
  public Bytes removeHash(Bytes row) {
    Preconditions.checkArgument(row.length() >= prefixBytes.length() + HASH_AND_SEP_LEN,
        "Row is shorter than expected " + row);
    Preconditions.checkArgument(row.subSequence(0, prefixBytes.length()).equals(prefixBytes),
        "Row does not have expected prefix " + row);
    Preconditions.checkArgument(hasHash(row), "Row does not have expected hash " + row);
    return row.subSequence(prefixBytes.length() + HASH_AND_SEP_LEN, row.length());
  }

  private static String genHash(Bytes row) {
    int hash = Hashing.murmur3_32().hashBytes(row.toArray()).asInt();
    // Clear the sign bit so the base-36 encoding never produces a '-'.
    hash = hash & 0x7fffffff;
    // base 36 gives a lot more bins in 4 bytes than hex, but it is still human readable which is
    // nice for debugging.
    String hashString =
        Strings.padStart(Integer.toString(hash, Character.MAX_RADIX), HASH_LEN, '0');
    hashString = hashString.substring(hashString.length() - HASH_LEN);

    return hashString;
  }
}
| 6,013 |
0 | Create_ds/fluo-recipes/modules/test/src/test/java/org/apache/fluo/recipes/test | Create_ds/fluo-recipes/modules/test/src/test/java/org/apache/fluo/recipes/test/export/AccumuloExporterIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.test.export;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.fluo.api.client.FluoClient;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.client.Transaction;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.mini.MiniFluo;
import org.apache.fluo.api.observer.ObserverProvider;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter;
import org.apache.fluo.recipes.core.export.ExportQueue;
import org.apache.fluo.recipes.test.AccumuloExportITBase;
import org.apache.hadoop.io.Text;
import org.junit.Assert;
import org.junit.Test;
/**
 * Integration test verifying that entries added to an {@link ExportQueue} end up in an Accumulo
 * table via {@link AccumuloExporter}, with the latest value per key winning.
 */
public class AccumuloExporterIT extends AccumuloExportITBase {

  private String exportTable;
  public static final String QUEUE_ID = "aeqt";

  public static class AccumuloExporterObserverProvider implements ObserverProvider {
    @Override
    public void provide(Registry obsRegistry, Context ctx) {
      SimpleConfiguration appCfg = ctx.getAppConfiguration();

      ExportQueue<String, String> teq = ExportQueue.getInstance(QUEUE_ID, appCfg);

      // Each export becomes one mutation: row = export key, timestamp = export sequence.
      // NOTE(review): using the sequence as the Accumulo timestamp appears intended to make the
      // highest-sequence value win on read (the test asserts last-written values) — relies on
      // the table's default latest-version behavior.
      teq.registerObserver(obsRegistry,
          new AccumuloExporter<>(QUEUE_ID, appCfg, (export, mutConsumer) -> {
            Mutation m = new Mutation(export.getKey());
            m.put("cf", "cq", export.getSequence(), export.getValue());
            mutConsumer.accept(m);
          }));
    }
  }

  @Override
  public void preFluoInitHook() throws Exception {
    // create and configure export table
    exportTable = "export" + tableCounter.getAndIncrement();
    getAccumuloConnector().tableOperations().create(exportTable);

    MiniAccumuloCluster miniAccumulo = getMiniAccumuloCluster();

    getFluoConfiguration().setObserverProvider(AccumuloExporterObserverProvider.class);

    // 5 buckets with 1 bucket per tablet => 4 bucket splits + the queue's 2 range-boundary
    // rows, matching the 6 splits asserted in testAccumuloExport.
    ExportQueue.configure(QUEUE_ID).keyType(String.class).valueType(String.class).buckets(5)
        .bucketsPerTablet(1).save(getFluoConfiguration());

    AccumuloExporter.configure(QUEUE_ID)
        .instance(miniAccumulo.getInstanceName(), miniAccumulo.getZooKeepers())
        .credentials(ACCUMULO_USER, ACCUMULO_PASSWORD).table(exportTable)
        .save(getFluoConfiguration());
  }

  @Test
  public void testAccumuloExport() throws Exception {

    ExportQueue<String, String> teq =
        ExportQueue.getInstance(QUEUE_ID, getFluoConfiguration().getAppConfiguration());

    // Splits configured in preFluoInitHook should have been applied to the Fluo table.
    Assert.assertEquals(6, getFluoSplits().size());

    MiniFluo miniFluo = getMiniFluo();

    try (FluoClient fc = FluoFactory.newClient(miniFluo.getClientConfiguration())) {

      Map<String, String> expected = new HashMap<>();

      // initial exports for three distinct keys
      try (Transaction tx = fc.newTransaction()) {
        export(teq, tx, expected, "0001", "abc");
        export(teq, tx, expected, "0002", "def");
        export(teq, tx, expected, "0003", "ghi");
        tx.commit();
      }

      miniFluo.waitForObservers();
      Assert.assertEquals(expected, getExports());

      // overwrite an existing key
      try (Transaction tx = fc.newTransaction()) {
        export(teq, tx, expected, "0001", "xyz");
        tx.commit();
      }

      miniFluo.waitForObservers();
      Assert.assertEquals(expected, getExports());

      // two back-to-back updates to the same key; the later one should win
      try (Transaction tx = fc.newTransaction()) {
        export(teq, tx, expected, "0001", "zzz");
        tx.commit();
      }

      try (Transaction tx = fc.newTransaction()) {
        export(teq, tx, expected, "0001", "mmm");
        tx.commit();
      }

      miniFluo.waitForObservers();
      Assert.assertEquals(expected, getExports());

      // randomized load: many overwrites across a small key space (fixed seed for determinism)
      Random rand = new Random(42);

      for (int i = 0; i < 1000; i++) {
        String k = String.format("%04d", rand.nextInt(100));
        String v = String.format("%04d", rand.nextInt(10000));

        try (Transaction tx = fc.newTransaction()) {
          export(teq, tx, expected, k, v);
          tx.commit();
        }
      }

      miniFluo.waitForObservers();
      Assert.assertEquals(expected, getExports());
    }
  }

  // Adds the key/value to the export queue and records it in the expected map.
  private void export(ExportQueue<String, String> teq, Transaction tx, Map<String, String> expected,
      String k, String v) {
    teq.add(tx, k, v);
    expected.put(k, v);
  }

  private Collection<Text> getFluoSplits() throws Exception {
    return getAccumuloConnector().tableOperations()
        .listSplits(getFluoConfiguration().getAccumuloTable());
  }

  // Reads the export table back into a map; asserts each row appears at most once.
  private Map<String, String> getExports() throws Exception {
    Scanner scanner = getAccumuloConnector().createScanner(exportTable, Authorizations.EMPTY);
    Map<String, String> ret = new HashMap<>();

    for (Entry<Key, Value> entry : scanner) {
      String k = entry.getKey().getRowData().toString();
      Assert.assertFalse(ret.containsKey(k));
      ret.put(k, entry.getValue().toString());
    }

    return ret;
  }
}
| 6,014 |
0 | Create_ds/fluo-recipes/modules/test/src/test/java/org/apache/fluo/recipes/test | Create_ds/fluo-recipes/modules/test/src/test/java/org/apache/fluo/recipes/test/export/AccumuloReplicatorIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.test.export;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.fluo.api.client.FluoClient;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.client.Transaction;
import org.apache.fluo.api.mini.MiniFluo;
import org.apache.fluo.api.observer.ObserverProvider;
import org.apache.fluo.recipes.accumulo.export.AccumuloReplicator;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter;
import org.apache.fluo.recipes.core.export.ExportQueue;
import org.apache.fluo.recipes.core.transaction.RecordingTransaction;
import org.apache.fluo.recipes.core.transaction.TxLog;
import org.apache.fluo.recipes.core.types.StringEncoder;
import org.apache.fluo.recipes.core.types.TypeLayer;
import org.apache.fluo.recipes.core.types.TypedTransaction;
import org.apache.fluo.recipes.test.AccumuloExportITBase;
import org.junit.Assert;
import org.junit.Test;
/**
 * Integration test verifying that {@link AccumuloReplicator} replays a Fluo {@link TxLog} into an
 * Accumulo table, keeping it in sync with the writes and deletes captured by
 * {@link RecordingTransaction}.
 */
public class AccumuloReplicatorIT extends AccumuloExportITBase {

  private String exportTable;
  public static final String QUEUE_ID = "repq";
  private TypeLayer tl = new TypeLayer(new StringEncoder());

  public static class AritObserverProvider implements ObserverProvider {
    @Override
    public void provide(Registry or, Context ctx) {
      ExportQueue<String, TxLog> eq = ExportQueue.getInstance(QUEUE_ID, ctx.getAppConfiguration());
      // The replicator's translator converts each queued TxLog into Accumulo mutations.
      eq.registerObserver(or, new AccumuloExporter<>(QUEUE_ID, ctx.getAppConfiguration(),
          AccumuloReplicator.getTranslator()));
    }
  }

  @Override
  public void preFluoInitHook() throws Exception {
    // create and configure export table
    exportTable = "export" + tableCounter.getAndIncrement();
    getAccumuloConnector().tableOperations().create(exportTable);

    MiniAccumuloCluster miniAccumulo = getMiniAccumuloCluster();

    ExportQueue.configure(QUEUE_ID).keyType(String.class).valueType(TxLog.class).buckets(5)
        .save(getFluoConfiguration());

    AccumuloExporter.configure(QUEUE_ID)
        .instance(miniAccumulo.getInstanceName(), miniAccumulo.getZooKeepers())
        .credentials(ACCUMULO_USER, ACCUMULO_PASSWORD).table(exportTable)
        .save(getFluoConfiguration());

    getFluoConfiguration().setObserverProvider(AritObserverProvider.class);
  }

  @Test
  public void testAccumuloReplicator() throws Exception {

    ExportQueue<String, TxLog> eq =
        ExportQueue.getInstance(QUEUE_ID, getFluoConfiguration().getAppConfiguration());

    MiniFluo miniFluo = getMiniFluo();

    try (FluoClient fc = FluoFactory.newClient(miniFluo.getClientConfiguration())) {

      Map<String, String> expected = new HashMap<>();

      // initial writes; the RecordingTransaction captures them into a TxLog that is queued
      try (Transaction tx = fc.newTransaction()) {
        RecordingTransaction rtx = RecordingTransaction.wrap(tx, AccumuloReplicator.getFilter());
        TypedTransaction ttx = tl.wrap(rtx);
        write(ttx, expected, "k1", "v1");
        write(ttx, expected, "k2", "v2");
        write(ttx, expected, "k3", "v3");
        eq.add(tx, "q1", rtx.getTxLog());
        tx.commit();
      }

      miniFluo.waitForObservers();
      Assert.assertEquals(expected, getExports());

      // overwrites, a delete, and a brand-new key in one transaction
      try (Transaction tx = fc.newTransaction()) {
        RecordingTransaction rtx = RecordingTransaction.wrap(tx, AccumuloReplicator.getFilter());
        TypedTransaction ttx = tl.wrap(rtx);
        write(ttx, expected, "k1", "v4");
        delete(ttx, expected, "k3");
        write(ttx, expected, "k2", "v5");
        write(ttx, expected, "k4", "v6");
        eq.add(tx, "q1", rtx.getTxLog());
        tx.commit();
      }

      miniFluo.waitForObservers();
      Assert.assertEquals(expected, getExports());

      // re-create a previously deleted key and delete keys written earlier
      try (Transaction tx = fc.newTransaction()) {
        RecordingTransaction rtx = RecordingTransaction.wrap(tx, AccumuloReplicator.getFilter());
        TypedTransaction ttx = tl.wrap(rtx);
        write(ttx, expected, "k2", "v7");
        write(ttx, expected, "k3", "v8");
        delete(ttx, expected, "k1");
        delete(ttx, expected, "k4");
        eq.add(tx, "q1", rtx.getTxLog());
        tx.commit();
      }

      miniFluo.waitForObservers();
      Assert.assertEquals(expected, getExports());
    }
  }

  // Writes fam:qual for the key and mirrors the change in the expected map.
  private void write(TypedTransaction ttx, Map<String, String> expected, String key, String value) {
    ttx.mutate().row(key).fam("fam").qual("qual").set(value);
    expected.put(key, value);
  }

  // Deletes fam:qual for the key and mirrors the change in the expected map.
  private void delete(TypedTransaction ttx, Map<String, String> expected, String key) {
    ttx.mutate().row(key).fam("fam").qual("qual").delete();
    expected.remove(key);
  }

  // Reads the replicated table back into a map; asserts each row appears at most once.
  private Map<String, String> getExports() throws Exception {
    Scanner scanner = getAccumuloConnector().createScanner(exportTable, Authorizations.EMPTY);
    Map<String, String> ret = new HashMap<>();

    for (Entry<Key, Value> entry : scanner) {
      String k = entry.getKey().getRowData().toString();
      Assert.assertFalse(ret.containsKey(k));
      ret.put(k, entry.getValue().toString());
    }

    return ret;
  }
}
| 6,015 |
0 | Create_ds/fluo-recipes/modules/test/src/main/java/org/apache/fluo/recipes | Create_ds/fluo-recipes/modules/test/src/main/java/org/apache/fluo/recipes/test/AccumuloExportITBase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.test;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.accumulo.minicluster.MiniAccumuloConfig;
import org.apache.commons.io.FileUtils;
import org.apache.fluo.api.client.FluoAdmin;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.mini.MiniFluo;
import org.apache.fluo.recipes.accumulo.ops.TableOperations;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
/**
* This class is intended to be extended by classes testing exporting from Fluo to Accumulo. Using
* MiniFluo by itself is easy. However, using MiniAccumulo and MiniFluo together involves writing a
* lot of boilerplate code. That's why this class exists: it's a place to put that boilerplate code.
*
* <p>
* Below is some example code showing how to use this class to write a test.
*
* <pre>
* <code>
* class MyExportIT extends AccumuloExportITBase {
*
* private String exportTable;
*
* public MyExportIT(){
* //indicate that MiniFluo should be started before each test
* super(true);
* }
*
* {@literal @}Override
 * //this method is called by the super class before initializing Fluo
* public void preFluoInitHook() throws Exception {
*
* //create table to export to
* Connector conn = getAccumuloConnector();
* exportTable = "export" + tableCounter.getAndIncrement();
* conn.tableOperations().create(exportTable);
*
* //This config will be used to initialize Fluo
* FluoConfiguration fluoConfig = getFluoConfiguration();
*
* MiniAccumuloCluster miniAccumulo = getMiniAccumuloCluster();
* String instance = miniAccumulo.getInstanceName();
* String zookeepers = miniAccumulo.getZooKeepers();
* String user = ACCUMULO_USER;
* String password = ACCUMULO_PASSWORD;
*
* //Configure observers on fluoConfig to export using info above
* }
*
* {@literal @}Test
* public void exportTest1(){
* try(FluoClient client = FluoFactory.newClient(getFluoConfiguration())) {
* //write some data that will cause an observer to export data
* }
*
* getMiniFluo().waitForObservers();
*
* //verify data was exported
* }
* }
* </code>
* </pre>
*
* @since 1.0.0
*/
public class AccumuloExportITBase {

  public static final String ACCUMULO_USER = "root";
  public static final String ACCUMULO_PASSWORD = "secret";

  // Shared MiniAccumuloCluster state; created once per test class run.
  private static File baseDir;
  private static MiniAccumuloCluster cluster;

  // Per-test Fluo state; reset before each test.
  private FluoConfiguration fluoConfig;
  private MiniFluo miniFluo;

  // Used to give every test a distinct Accumulo table name.
  protected static AtomicInteger tableCounter = new AtomicInteger(1);

  // Whether MiniFluo is started before each test (Fluo is always initialized regardless).
  private final boolean startMiniFluo;

  protected AccumuloExportITBase() {
    this(true);
  }

  /**
   * @param startMiniFluo passing true will cause MiniFluo to be started before each test. Passing
   *        false will cause Fluo to be initialized, but not started before each test.
   */
  protected AccumuloExportITBase(boolean startMiniFluo) {
    this.startMiniFluo = startMiniFluo;
  }

  /**
   * Starts a MiniAccumuloCluster in a unique directory, preferring the Maven target directory
   * when present so test artifacts stay inside the build tree.
   */
  @BeforeClass
  public static void setupMiniAccumulo() throws Exception {
    try {
      // try to put in target dir
      File targetDir = new File("target");
      if (targetDir.exists() && targetDir.isDirectory()) {
        baseDir = new File(targetDir, "accumuloExportIT-" + UUID.randomUUID());
      } else {
        baseDir = new File(FileUtils.getTempDirectory(), "accumuloExportIT-" + UUID.randomUUID());
      }
      FileUtils.deleteDirectory(baseDir);
      MiniAccumuloConfig cfg = new MiniAccumuloConfig(baseDir, ACCUMULO_PASSWORD);
      cluster = new MiniAccumuloCluster(cfg);
      cluster.start();
    } catch (IOException e) {
      throw new IllegalStateException(e);
    } catch (InterruptedException e) {
      // Preserve the interrupt status for callers further up the stack.
      Thread.currentThread().interrupt();
      throw new IllegalStateException(e);
    }
  }

  @AfterClass
  public static void tearDownMiniAccumulo() throws Exception {
    cluster.stop();
    FileUtils.deleteDirectory(baseDir);
  }

  /**
   * Before each test: resets the Fluo configuration, runs the pre-init hook, initializes Fluo
   * (clearing any previous table/ZooKeeper state), runs the post-init hook, and optionally
   * starts MiniFluo.
   */
  @Before
  public void setupMiniFluo() throws Exception {
    resetFluoConfig();
    preFluoInitHook();

    FluoFactory.newAdmin(fluoConfig).initialize(
        new FluoAdmin.InitializationOptions().setClearTable(true).setClearZookeeper(true));

    postFluoInitHook();

    if (startMiniFluo) {
      miniFluo = FluoFactory.newMiniFluo(fluoConfig);
    } else {
      miniFluo = null;
    }
  }

  @After
  public void tearDownMiniFluo() throws Exception {
    if (miniFluo != null) {
      miniFluo.close();
      miniFluo = null;
    }
  }

  /**
   * This method is intended to be overridden. The method is called before each test before Fluo is
   * initialized.
   */
  protected void preFluoInitHook() throws Exception {
  }

  /**
   * This method is intended to be overridden. The method is called before each test after Fluo is
   * initialized before MiniFluo is started.
   */
  protected void postFluoInitHook() throws Exception {
    TableOperations.optimizeTable(fluoConfig);
  }

  /**
   * Retrieves MiniAccumuloCluster
   */
  protected MiniAccumuloCluster getMiniAccumuloCluster() {
    return cluster;
  }

  /**
   * Retrieves MiniFluo
   */
  protected synchronized MiniFluo getMiniFluo() {
    return miniFluo;
  }

  /**
   * Returns an Accumulo Connector to MiniAccumuloCluster
   */
  protected Connector getAccumuloConnector() {
    try {
      return cluster.getConnector(ACCUMULO_USER, ACCUMULO_PASSWORD);
    } catch (AccumuloException | AccumuloSecurityException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Retrieves Fluo Configuration
   */
  protected synchronized FluoConfiguration getFluoConfiguration() {
    return fluoConfig;
  }

  /**
   * A utility method that will set the configuration needed by Fluo from a given MiniCluster
   */
  public static void configureFromMAC(FluoConfiguration fluoConfig, MiniAccumuloCluster cluster) {
    fluoConfig.setMiniStartAccumulo(false);
    fluoConfig.setAccumuloInstance(cluster.getInstanceName());
    // Use the shared constant rather than a duplicated "root" literal.
    fluoConfig.setAccumuloUser(ACCUMULO_USER);
    fluoConfig.setAccumuloPassword(cluster.getConfig().getRootPassword());
    fluoConfig.setInstanceZookeepers(cluster.getZooKeepers() + "/fluo");
    fluoConfig.setAccumuloZookeepers(cluster.getZooKeepers());
  }

  private void resetFluoConfig() {
    fluoConfig = new FluoConfiguration();
    configureFromMAC(fluoConfig, cluster);
    fluoConfig.setApplicationName("fluo-it");
    fluoConfig.setAccumuloTable("fluo" + tableCounter.getAndIncrement());
  }
}
| 6,016 |
0 | Create_ds/fluo-recipes/modules/test/src/main/java/org/apache/fluo/recipes | Create_ds/fluo-recipes/modules/test/src/main/java/org/apache/fluo/recipes/test/Hex.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.test;
import java.io.ByteArrayOutputStream;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.api.data.RowColumn;
import org.apache.fluo.api.data.RowColumnValue;
/**
* Utility code for encoding/decoding Non-Ascii bytes to/from String
*/
class Hex {

  /**
   * Appends an escaped rendering of {@code bytes} to {@code sb}: printable ASCII (except
   * backslash, the escape introducer) passes through; every other byte becomes {@code \xHH}.
   */
  public static void encNonAscii(StringBuilder sb, Bytes bytes) {
    for (int i = 0; i < bytes.length(); i++) {
      byte b = bytes.byteAt(i);
      if (b >= 32 && b <= 126 && b != '\\') {
        sb.append((char) b);
      } else {
        sb.append(String.format("\\x%02x", b & 0xff));
      }
    }
  }

  public static String encNonAscii(Bytes bytes) {
    StringBuilder sb = new StringBuilder();
    encNonAscii(sb, bytes);
    return sb.toString();
  }

  // Encodes family and qualifier, separated by sep.
  public static void encNonAscii(StringBuilder sb, Column c, String sep) {
    encNonAscii(sb, c.getFamily());
    sb.append(sep);
    encNonAscii(sb, c.getQualifier());
  }

  // Encodes row, family, and qualifier, separated by sep.
  public static void encNonAscii(StringBuilder sb, RowColumn rc, String sep) {
    encNonAscii(sb, rc.getRow());
    sb.append(sep);
    encNonAscii(sb, rc.getColumn(), sep);
  }

  // Encodes row, family, qualifier, and value, separated by sep.
  public static String encNonAscii(RowColumnValue rcv, String sep) {
    StringBuilder sb = new StringBuilder();
    encNonAscii(sb, rcv.getRow());
    sb.append(sep);
    encNonAscii(sb, rcv.getColumn(), sep);
    sb.append(sep);
    encNonAscii(sb, rcv.getValue());
    return sb.toString();
  }

  /**
   * Inverse of {@link #encNonAscii(Bytes)}: decodes {@code \xHH} escapes back to raw bytes.
   *
   * @throws IllegalArgumentException if the input contains a malformed/truncated escape or a
   *         non-printable character outside an escape
   */
  static byte[] decode(String s) {
    // the next best thing to a StringBuilder for bytes
    ByteArrayOutputStream baos = new ByteArrayOutputStream(s.length());
    for (int i = 0; i < s.length(); i++) {
      byte b;
      if (s.charAt(i) == '\\') {
        // Bounds guard added: a truncated escape at end of input previously surfaced as
        // StringIndexOutOfBoundsException; report all malformed input uniformly.
        if (i + 3 >= s.length() || s.charAt(i + 1) != 'x') {
          throw new IllegalArgumentException();
        }
        String num = "" + s.charAt(i + 2) + s.charAt(i + 3);
        b = (byte) (0xff & Integer.parseInt(num, 16));
        i += 3; // consume the whole \xHH sequence
      } else {
        char c = s.charAt(i);
        if (c < 32 || c > 126) {
          throw new IllegalArgumentException();
        }
        b = (byte) (0xff & c);
      }
      baos.write(b);
    }

    return baos.toByteArray();
  }
}
| 6,017 |
0 | Create_ds/fluo-recipes/modules/test/src/main/java/org/apache/fluo/recipes | Create_ds/fluo-recipes/modules/test/src/main/java/org/apache/fluo/recipes/test/FluoITHelper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.fluo.api.client.FluoClient;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.client.Snapshot;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.api.data.RowColumnValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Helper for creating integration tests that connect to a MiniFluo/MiniAccumuloCluster instance.
*
* @since 1.0.0
*/
public class FluoITHelper {
private static final Logger log = LoggerFactory.getLogger(FluoITHelper.class);
/**
* Prints list of RowColumnValue objects
*
* @param rcvList RowColumnValue list
*/
public static void printRowColumnValues(Collection<RowColumnValue> rcvList) {
System.out.println("== RDD start ==");
rcvList.forEach(rcv -> System.out.println("rc " + Hex.encNonAscii(rcv, " ")));
System.out.println("== RDD end ==");
}
public static void printFluoTable(FluoConfiguration conf) {
try (FluoClient client = FluoFactory.newClient(conf)) {
printFluoTable(client);
}
}
/**
* Prints Fluo table accessible using provided client
*
* @param client Fluo client to table
*/
public static void printFluoTable(FluoClient client) {
try (Snapshot s = client.newSnapshot()) {
System.out.println("== fluo start ==");
for (RowColumnValue rcv : s.scanner().build()) {
StringBuilder sb = new StringBuilder();
Hex.encNonAscii(sb, rcv.getRow());
sb.append(" ");
Hex.encNonAscii(sb, rcv.getColumn(), " ");
sb.append("\t");
Hex.encNonAscii(sb, rcv.getValue());
System.out.println(sb.toString());
}
System.out.println("=== fluo end ===");
}
}
// @formatter:off
public static boolean verifyFluoTable(FluoConfiguration conf,
Collection<RowColumnValue> expected) {
// @formatter:on
try (FluoClient client = FluoFactory.newClient(conf)) {
return verifyFluoTable(client, expected);
}
}
/**
* Verifies that the actual data in provided Fluo instance matches expected data
*
* @param client Fluo client to instance with actual data
* @param expected RowColumnValue list containing expected data
* @return True if actual data matches expected data
*/
public static boolean verifyFluoTable(FluoClient client, Collection<RowColumnValue> expected) {
expected = sort(expected);
try (Snapshot s = client.newSnapshot()) {
Iterator<RowColumnValue> fluoIter = s.scanner().build().iterator();
Iterator<RowColumnValue> rcvIter = expected.iterator();
while (fluoIter.hasNext() && rcvIter.hasNext()) {
RowColumnValue actualRcv = fluoIter.next();
RowColumnValue rcv = rcvIter.next();
boolean retval = diff("fluo row", rcv.getRow(), actualRcv.getRow());
retval |= diff("fluo fam", rcv.getColumn().getFamily(), actualRcv.getColumn().getFamily());
retval |=
diff("fluo qual", rcv.getColumn().getQualifier(), actualRcv.getColumn().getQualifier());
retval |= diff("fluo val", rcv.getValue(), actualRcv.getValue());
if (retval) {
log.error("Difference found - row {} cf {} cq {} val {}", rcv.getsRow(),
rcv.getColumn().getsFamily(), rcv.getColumn().getsQualifier(), rcv.getsValue());
return false;
}
log.debug("Verified {}", Hex.encNonAscii(rcv, " "));
}
if (fluoIter.hasNext() || rcvIter.hasNext()) {
log.error("An iterator still has more data");
return false;
}
log.debug("Actual data matched expected data");
return true;
}
}
/**
* Prints specified Accumulo table (accessible using Accumulo connector parameter)
*
* @param conn Accumulo connector of to instance with table to print
* @param accumuloTable Accumulo table to print
* @deprecated since 1.3.0 use {@link #printAccumuloTable(AccumuloClient, String)}
*/
@Deprecated(since = "1.3.0", forRemoval = true)
public static void printAccumuloTable(Connector conn, String accumuloTable) {
Scanner scanner = null;
try {
scanner = conn.createScanner(accumuloTable, Authorizations.EMPTY);
} catch (TableNotFoundException e) {
throw new IllegalStateException(e);
}
Iterator<Map.Entry<Key, Value>> iterator = scanner.iterator();
System.out.println("== accumulo start ==");
while (iterator.hasNext()) {
Map.Entry<Key, Value> entry = iterator.next();
System.out.println(entry.getKey() + " " + entry.getValue());
}
System.out.println("== accumulo end ==");
}
/**
* Prints specified Accumulo table
*
* @param client Accumulo clientto instance with table to print
* @param accumuloTable Accumulo table to print
*
* @since 1.3.0
*/
@SuppressWarnings("deprecation")
public static void printAccumuloTable(AccumuloClient client, String accumuloTable) {
try {
printAccumuloTable(Connector.from(client), accumuloTable);
} catch (AccumuloSecurityException | AccumuloException e) {
throw new RuntimeException(e);
}
}
private static boolean diff(String dataType, String expected, String actual) {
if (!expected.equals(actual)) {
log.error("Difference found in {} - expected {} actual {}", dataType, expected, actual);
return true;
}
return false;
}
private static boolean diff(String dataType, Bytes expected, Bytes actual) {
if (!expected.equals(actual)) {
log.error("Difference found in {} - expected {} actual {}", dataType,
Hex.encNonAscii(expected), Hex.encNonAscii(actual));
return true;
}
return false;
}
/**
* Verifies that actual data in Accumulo table matches expected data
*
* @param conn Connector to Accumulo instance with actual data
* @param accumuloTable Accumulo table with actual data
* @param expected RowColumnValue list containing expected data
* @return True if actual data matches expected data
* @deprecated since 1.3.0 use {@link #verifyAccumuloTable(AccumuloClient, String, Collection)}
*/
@Deprecated(since = "1.3.0", forRemoval = true)
public static boolean verifyAccumuloTable(Connector conn, String accumuloTable,
Collection<RowColumnValue> expected) {
expected = sort(expected);
Scanner scanner;
try {
scanner = conn.createScanner(accumuloTable, Authorizations.EMPTY);
} catch (TableNotFoundException e) {
throw new IllegalStateException(e);
}
Iterator<Map.Entry<Key, Value>> scanIter = scanner.iterator();
Iterator<RowColumnValue> rcvIter = expected.iterator();
while (scanIter.hasNext() && rcvIter.hasNext()) {
RowColumnValue rcv = rcvIter.next();
Map.Entry<Key, Value> kvEntry = scanIter.next();
Key key = kvEntry.getKey();
Column col = rcv.getColumn();
boolean retval = diff("row", rcv.getRow().toString(), key.getRow().toString());
retval |= diff("fam", col.getFamily().toString(), key.getColumnFamily().toString());
retval |= diff("qual", col.getQualifier().toString(), key.getColumnQualifier().toString());
retval |= diff("val", rcv.getValue().toString(), kvEntry.getValue().toString());
if (retval) {
log.error("Difference found - row {} cf {} cq {} val {}", rcv.getRow().toString(),
col.getFamily().toString(), col.getQualifier().toString(), rcv.getValue().toString());
return false;
}
log.debug("Verified row {} cf {} cq {} val {}", rcv.getRow().toString(),
col.getFamily().toString(), col.getQualifier().toString(), rcv.getValue().toString());
}
if (scanIter.hasNext() || rcvIter.hasNext()) {
log.error("An iterator still has more data");
return false;
}
log.debug("Actual data matched expected data");
return true;
}
/**
* Verifies that actual data in Accumulo table matches expected data
*
* @param client Client from Accumulo instance with actual data
* @param accumuloTable Accumulo table with actual data
* @param expected RowColumnValue list containing expected data
* @return True if actual data matches expected data
*
* @since 1.3.0
*/
@SuppressWarnings("deprecation")
public static boolean verifyAccumuloTable(AccumuloClient client, String accumuloTable,
Collection<RowColumnValue> expected) {
try {
return verifyAccumuloTable(Connector.from(client), accumuloTable, expected);
} catch (AccumuloSecurityException | AccumuloException e) {
throw new RuntimeException(e);
}
}
/**
* Verifies that expected list of RowColumnValues matches actual
*
* @param expected RowColumnValue list containing expected data
* @param actual RowColumnValue list containing actual data
* @return True if actual data matches expected data
*/
public static boolean verifyRowColumnValues(Collection<RowColumnValue> expected,
Collection<RowColumnValue> actual) {
expected = sort(expected);
actual = sort(actual);
Iterator<RowColumnValue> expectIter = expected.iterator();
Iterator<RowColumnValue> actualIter = actual.iterator();
while (expectIter.hasNext() && actualIter.hasNext()) {
RowColumnValue expRcv = expectIter.next();
RowColumnValue actRcv = actualIter.next();
boolean retval = diff("rcv row", expRcv.getRow(), actRcv.getRow());
retval |= diff("rcv fam", expRcv.getColumn().getFamily(), actRcv.getColumn().getFamily());
retval |=
diff("rcv qual", expRcv.getColumn().getQualifier(), actRcv.getColumn().getQualifier());
retval |= diff("rcv val", expRcv.getValue(), actRcv.getValue());
if (retval) {
log.error("Difference found in RowColumnValue lists - expected {} actual {}", expRcv,
actRcv);
return false;
}
log.debug("Verified row/col/val: {}", expRcv);
}
if (expectIter.hasNext() || actualIter.hasNext()) {
log.error("A RowColumnValue list iterator still has more data");
return false;
}
log.debug("Actual data matched expected data");
return true;
}
private static List<RowColumnValue> sort(Collection<RowColumnValue> input) {
ArrayList<RowColumnValue> copy = new ArrayList<>(input);
Collections.sort(copy);
return copy;
}
/**
* A helper method for parsing test data. Each string passed in is expected to have the following
* format {@literal <row>|<family>|<qualifier>|<value>}
*/
public static List<RowColumnValue> parse(String... data) {
return parse(Splitter.on('|'), data);
}
/**
* A helper method for parsing test data. Each string passed in is split using the specified
* splitter into four fields for row, family, qualifier, and value.
*/
public static List<RowColumnValue> parse(Splitter splitter, String... data) {
ArrayList<RowColumnValue> ret = new ArrayList<>();
for (String line : data) {
Iterable<String> cols = splitter.split(line);
if (Iterables.size(cols) != 4) {
throw new IllegalArgumentException("Bad input " + line);
}
Iterator<String> iter = cols.iterator();
RowColumnValue rcv = new RowColumnValue(Bytes.of(iter.next()),
new Column(iter.next(), iter.next()), Bytes.of(iter.next()));
ret.add(rcv);
}
return ret;
}
}
| 6,018 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/test/java/org/apache/fluo/recipes/accumulo | Create_ds/fluo-recipes/modules/accumulo/src/test/java/org/apache/fluo/recipes/accumulo/export/AccumuloTranslatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.export;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import org.apache.accumulo.core.data.Mutation;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.api.data.RowColumn;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloTranslator;
import org.junit.Assert;
import org.junit.Test;
public class AccumuloTranslatorTest {

  /**
   * Builds the Fluo-side data for a key: one row/column/value triple per character of val,
   * using row "r:<key>", family "cf:<char>", and value "v:<char>". An absent val yields an
   * empty map (the key does not exist at all).
   */
  public static Map<RowColumn, Bytes> genData(String key, Optional<String> val) {
    if (!val.isPresent()) {
      return Collections.emptyMap();
    }
    Map<RowColumn, Bytes> rcMap = new HashMap<>();
    String data = val.get();
    for (int i = 0; i < data.length(); i++) {
      char c = data.charAt(i);
      rcMap.put(new RowColumn("r:" + key, new Column("cf:" + c)), Bytes.of("v:" + c));
    }
    return rcMap;
  }

  /**
   * Runs AccumuloTranslator.generateMutations on the difference between the old and new
   * values for a key, feeding each produced Mutation to the given consumer.
   */
  public static void genMutations(String key, long seq, Optional<String> oldVal,
      Optional<String> newVal, Consumer<Mutation> consumer) {
    AccumuloTranslator.generateMutations(seq, genData(key, oldVal), genData(key, newVal), consumer);
  }

  // Builds the expected Mutation for a single put of val at the given sequence number.
  public static Mutation makePut(String key, String val, long seq) {
    Mutation m = new Mutation("r:" + key);
    addPut(m, key, val, seq);
    return m;
  }

  // Adds a put for val to an existing Mutation. NOTE: key is unused here because the
  // mutation's row was already fixed when the Mutation was constructed.
  public static void addPut(Mutation m, String key, String val, long seq) {
    m.put("cf:" + val, "", seq, "v:" + val);
  }

  // Builds the expected Mutation for a single delete of val at the given sequence number.
  public static Mutation makeDel(String key, String val, long seq) {
    Mutation m = new Mutation("r:" + key);
    addDel(m, key, val, seq);
    return m;
  }

  // Adds a delete for val to an existing Mutation. NOTE: key is unused here because the
  // mutation's row was already fixed when the Mutation was constructed.
  public static void addDel(Mutation m, String key, String val, long seq) {
    m.putDelete("cf:" + val, "", seq);
  }

  /**
   * Verifies that AccumuloTranslator only emits mutations for the difference between old
   * and new data: adds, removes, and replacements — and nothing when old equals new.
   * Relies on Accumulo Mutation equality for the contains() assertions.
   */
  @Test
  public void testDifferenceExport() {
    final Collection<Mutation> mutations = new ArrayList<>();
    Consumer<Mutation> consumer = mutations::add;

    // Insert of a brand-new key -> one put.
    genMutations("k1", 1, Optional.empty(), Optional.of("a"), consumer);
    Assert.assertEquals(1, mutations.size());
    Assert.assertTrue(mutations.contains(makePut("k1", "a", 1)));
    mutations.clear();

    // No change -> no mutations.
    genMutations("k2", 2, Optional.of("ab"), Optional.of("ab"), consumer);
    Assert.assertEquals(0, mutations.size());
    mutations.clear();

    // Value gained a character -> one put for the new character only.
    genMutations("k2", 2, Optional.of("b"), Optional.of("ab"), consumer);
    Assert.assertEquals(1, mutations.size());
    Assert.assertTrue(mutations.contains(makePut("k2", "a", 2)));
    mutations.clear();

    // Complete replacement -> a single mutation with both delete and put.
    genMutations("k3", 3, Optional.of("c"), Optional.of("d"), consumer);
    Assert.assertEquals(1, mutations.size());
    Mutation m = makeDel("k3", "c", 3);
    addPut(m, "k3", "d", 3);
    Assert.assertTrue(mutations.contains(m));
    mutations.clear();

    // Deletion of a key -> one delete.
    genMutations("k4", 4, Optional.of("e"), Optional.empty(), consumer);
    Assert.assertEquals(1, mutations.size());
    Assert.assertTrue(mutations.contains(makeDel("k4", "e", 4)));
    mutations.clear();

    // Partial overlap ("f" is shared) -> delete the removed char, put the added char.
    genMutations("k5", 5, Optional.of("ef"), Optional.of("fg"), consumer);
    Assert.assertEquals(1, mutations.size());
    m = makeDel("k5", "e", 5);
    addPut(m, "k5", "g", 5);
    Assert.assertTrue(mutations.contains(m));
    mutations.clear();
  }
}
| 6,019 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/cmds/OptimizeTable.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.cmds;
import javax.inject.Inject;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.recipes.accumulo.ops.TableOperations;
/**
* @since 1.0.0
*/
public class OptimizeTable {

  // Populated by the fluo exec command, which injects the application's Fluo configuration.
  @Inject
  private static FluoConfiguration fluoConfig;

  /**
   * Applies the registered table optimizations to the application's table. Takes no
   * command-line arguments.
   */
  public static void main(String[] args) throws Exception {

    // This command accepts no arguments; print usage and bail out otherwise.
    if (args.length > 0) {
      System.out.println("Usage : " + OptimizeTable.class.getName());
      System.exit(-1);
    }

    TableOperations.optimizeTable(fluoConfig);
    System.out.println("Finished optimizing table");
  }
}
| 6,020 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/cmds/CompactTransient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.cmds;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import org.apache.fluo.api.client.FluoClient;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.recipes.accumulo.ops.TableOperations;
import org.apache.fluo.recipes.core.common.RowRange;
import org.apache.fluo.recipes.core.common.TransientRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @since 1.0.0
*/
public class CompactTransient {

  // when run with fluo exec command, the applications fluo config will be injected
  @Inject
  private static FluoConfiguration fluoConfig;

  // Created only when a positive interval is requested (periodic mode).
  private static ScheduledExecutorService schedExecutor;

  private static final Logger log = LoggerFactory.getLogger(CompactTransient.class);

  /**
   * Compacts one transient range. In periodic mode (requestedSleepTime > 0) it reschedules
   * itself, sleeping at least requestedSleepTime but backing off to multiplier times the
   * observed compaction duration when compactions are slow.
   */
  private static class CompactTask implements Runnable {

    private final RowRange transientRange;
    private final long requestedSleepTime;
    private final double multiplier;

    public CompactTask(RowRange transientRange, long requestedSleepTime, double multiplier) {
      this.transientRange = transientRange;
      this.requestedSleepTime = requestedSleepTime;
      this.multiplier = multiplier;
    }

    @Override
    public void run() {
      long t1 = System.currentTimeMillis();
      try {
        TableOperations.compactTransient(fluoConfig, transientRange);
      } catch (Exception e) {
        // Log and keep going; a failed compaction should not kill the periodic schedule.
        log.warn("Compaction of " + transientRange + " failed ", e);
      }
      long t2 = System.currentTimeMillis();

      // Back off when compactions take long: sleep the larger of the requested interval
      // and multiplier * duration.
      long sleepTime = Math.max((long) (multiplier * (t2 - t1)), requestedSleepTime);

      if (requestedSleepTime > 0) {
        log.info("Compacted {} in {}ms sleeping {}ms", transientRange, t2 - t1, sleepTime);
        schedExecutor.schedule(new CompactTask(transientRange, requestedSleepTime, multiplier),
            sleepTime, TimeUnit.MILLISECONDS);
      } else {
        log.info("Compacted {} in {}ms", transientRange, t2 - t1);
      }
    }
  }

  /**
   * Usage: CompactTransient [interval [multiplier]]. With no arguments each transient range
   * is compacted once; with a positive interval (seconds) ranges are compacted periodically.
   */
  public static void main(String[] args) throws Exception {

    if ((args.length == 1 && args[0].startsWith("-h")) || (args.length > 2)) {
      System.out
          .println("Usage : " + CompactTransient.class.getName() + " [<interval> [<multiplier>]]");
      System.exit(-1);
    }

    int interval = 0;
    double multiplier = 3;

    if (args.length >= 1) {
      interval = Integer.parseInt(args[0]);
      if (args.length == 2) {
        multiplier = Double.parseDouble(args[1]);
      }
    }

    if (interval > 0) {
      schedExecutor = Executors.newScheduledThreadPool(1);
    }

    List<RowRange> transientRanges;

    try (FluoClient client = FluoFactory.newClient(fluoConfig)) {
      SimpleConfiguration appConfig = client.getAppConfiguration();

      TransientRegistry tr = new TransientRegistry(appConfig);
      transientRanges = tr.getTransientRanges();

      for (RowRange transientRange : transientRanges) {
        if (interval > 0) {
          // Widen to long before multiplying so a large interval cannot overflow int
          // arithmetic (the original computed interval * 1000 in int).
          schedExecutor.execute(new CompactTask(transientRange, 1000L * interval, multiplier));
        } else {
          new CompactTask(transientRange, 0, 0).run();
        }
      }
    }

    if (interval > 0) {
      // Periodic mode: keep the JVM alive while the scheduler does the work.
      while (true) {
        Thread.sleep(10000);
      }
    }
  }
}
| 6,021 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/ops/TableOperations.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.ops;
import java.util.List;
import java.util.TreeSet;
import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.fluo.api.client.FluoClient;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.recipes.core.common.RowRange;
import org.apache.fluo.recipes.core.common.TableOptimizations;
import org.apache.fluo.recipes.core.common.TransientRegistry;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility methods for operating on the Fluo table used by recipes.
*
* @since 1.0.0
*/
public class TableOperations {

  private static final Logger logger = LoggerFactory.getLogger(TableOperations.class);

  // Accumulo's regex group balancer and the table properties that configure it
  private static final String RGB_CLASS =
      "org.apache.accumulo.server.master.balancer.RegexGroupBalancer";
  private static final String RGB_PATTERN_PROP = "table.custom.balancer.group.regex.pattern";
  private static final String RGB_DEFAULT_PROP = "table.custom.balancer.group.regex.default";
  private static final String TABLE_BALANCER_PROP = "table.balancer";

  /** Builds an AccumuloClient from the connection settings in the given Fluo configuration. */
  private static AccumuloClient getClient(FluoConfiguration fluoConfig) throws Exception {
    return Accumulo.newClient()
        .to(fluoConfig.getAccumuloInstance(), fluoConfig.getAccumuloZookeepers())
        .as(fluoConfig.getAccumuloUser(), fluoConfig.getAccumuloPassword()).build();
  }

  /**
   * Make the requested table optimizations.
   *
   * @param fluoConfig should contain information need to connect to Accumulo and name of Fluo table
   * @param tableOptim Will perform these optimizations on Fluo table in Accumulo.
   */
  public static void optimizeTable(FluoConfiguration fluoConfig, TableOptimizations tableOptim)
      throws Exception {
    try (AccumuloClient client = getClient(fluoConfig)) {
      String table = fluoConfig.getAccumuloTable();

      // add the requested split points to the Fluo table
      TreeSet<Text> splitPoints = new TreeSet<>();
      tableOptim.getSplits().forEach(split -> splitPoints.add(new Text(split.toArray())));
      client.tableOperations().addSplits(table, splitPoints);

      String groupingRegex = tableOptim.getTabletGroupingRegex();
      if (groupingRegex != null && !groupingRegex.isEmpty()) {
        // was going to call :
        // conn.instanceOperations().testClassLoad(RGB_CLASS, TABLET_BALANCER_CLASS)
        // but that failed. See ACCUMULO-4068
        try {
          // setting this prop first intentionally because it should fail in 1.6
          client.tableOperations().setProperty(table, RGB_PATTERN_PROP, groupingRegex);
          client.tableOperations().setProperty(table, RGB_DEFAULT_PROP, "none");
          client.tableOperations().setProperty(table, TABLE_BALANCER_PROP, RGB_CLASS);
        } catch (AccumuloException e) {
          logger
              .warn("Unable to setup regex balancer (this is expected to fail in Accumulo 1.6.X) : "
                  + e.getMessage());
          logger.debug(
              "Unable to setup regex balancer (this is expected to fail in Accumulo 1.6.X)", e);
        }
      }
    }
  }

  /**
   * This method will perform all registered table optimizations. It will call
   * {@link TableOptimizations#getConfiguredOptimizations(FluoConfiguration)} to obtain
   * optimizations to perform.
   */
  public static void optimizeTable(FluoConfiguration fluoConfig) throws Exception {
    optimizeTable(fluoConfig, TableOptimizations.getConfiguredOptimizations(fluoConfig));
  }

  /**
   * Compact all transient regions that were registered using {@link TransientRegistry}
   */
  public static void compactTransient(FluoConfiguration fluoConfig) throws Exception {
    try (AccumuloClient aclient = getClient(fluoConfig);
        FluoClient fclient = FluoFactory.newClient(fluoConfig)) {
      SimpleConfiguration appConfig = fclient.getAppConfiguration();
      List<RowRange> ranges = new TransientRegistry(appConfig).getTransientRanges();
      for (RowRange range : ranges) {
        long start = System.currentTimeMillis();
        aclient.tableOperations().compact(fluoConfig.getAccumuloTable(),
            new Text(range.getStart().toArray()), new Text(range.getEnd().toArray()), true, true);
        long elapsed = System.currentTimeMillis() - start;
        logger.info("Compacted {} in {}ms", range, elapsed);
      }
    }
  }

  /** Compacts a single transient range of the Fluo table. */
  public static void compactTransient(FluoConfiguration fluoConfig, RowRange tRange)
      throws Exception {
    try (AccumuloClient client = getClient(fluoConfig)) {
      Text startRow = new Text(tRange.getStart().toArray());
      Text endRow = new Text(tRange.getEnd().toArray());
      client.tableOperations().compact(fluoConfig.getAccumuloTable(), startRow, endRow, true, true);
    }
  }
}
| 6,022 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export/AccumuloReplicator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.export;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Predicate;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloTranslator;
import org.apache.fluo.recipes.core.export.SequencedExport;
import org.apache.fluo.recipes.core.transaction.LogEntry;
import org.apache.fluo.recipes.core.transaction.RecordingTransaction;
import org.apache.fluo.recipes.core.transaction.TxLog;
/**
* Supports replicating data to Accumulo using a {@link TxLog}. The method {@link #getTranslator()}
* can be used with {@link org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter} to
* export {@link TxLog} objects.
*/
@SuppressWarnings("deprecation")
public class AccumuloReplicator extends AccumuloExporter<String, TxLog> {

  /**
   * @deprecated since 1.1.0 use
   *             {@link org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter} with
   *             {@link #getTranslator()} instead.
   */
  @Deprecated
  @Override
  protected void translate(SequencedExport<String, TxLog> export, Consumer<Mutation> consumer) {
    generateMutations(export.getSequence(), export.getValue(), consumer);
  }

  /**
   * Returns LogEntry filter for Accumulo replication.
   *
   * @see RecordingTransaction#wrap(org.apache.fluo.api.client.TransactionBase, Predicate)
   */
  public static Predicate<LogEntry> getFilter() {
    return entry -> {
      LogEntry.Operation op = entry.getOp();
      return op.equals(LogEntry.Operation.DELETE) || op.equals(LogEntry.Operation.SET);
    };
  }

  /**
   * @return A translator from TxLog to Mutations
   * @since 1.1.0
   */
  public static AccumuloTranslator<String, TxLog> getTranslator() {
    return (exp, out) -> generateMutations(exp.getSequence(), exp.getValue(), out);
  }

  /**
   * Generates Accumulo mutations from a Transaction log. Used to Replicate Fluo table to Accumulo.
   *
   * @param txLog Transaction log
   * @param seq Export sequence number
   * @param consumer generated mutations will be output to this consumer
   */
  public static void generateMutations(long seq, TxLog txLog, Consumer<Mutation> consumer) {
    Map<Bytes, Mutation> byRow = new HashMap<>();
    for (LogEntry entry : txLog.getLogEntries()) {
      LogEntry.Operation op = entry.getOp();
      Column col = entry.getColumn();
      byte[] fam = col.getFamily().toArray();
      byte[] qual = col.getQualifier().toArray();
      byte[] vis = col.getVisibility().toArray();
      boolean isDelete = op.equals(LogEntry.Operation.DELETE);
      if (!isDelete && !op.equals(LogEntry.Operation.SET)) {
        continue; // only SET and DELETE entries are replicated
      }
      // one mutation per row; lazily created on first entry for that row
      Mutation m = byRow.computeIfAbsent(entry.getRow(), row -> new Mutation(row.toArray()));
      if (isDelete) {
        if (col.isVisibilitySet()) {
          m.putDelete(fam, qual, new ColumnVisibility(vis), seq);
        } else {
          m.putDelete(fam, qual, seq);
        }
      } else if (col.isVisibilitySet()) {
        m.put(fam, qual, new ColumnVisibility(vis), seq, entry.getValue().toArray());
      } else {
        m.put(fam, qual, seq, entry.getValue().toArray());
      }
    }
    byRow.values().forEach(consumer);
  }
}
| 6,023 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export/AccumuloExporter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.export;
import java.util.Iterator;
import java.util.Map;
import java.util.function.Consumer;
import org.apache.accumulo.core.data.Mutation;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.RowColumn;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloTranslator;
import org.apache.fluo.recipes.core.export.SequencedExport;
/**
* An Accumulo-specific {@link org.apache.fluo.recipes.core.export.Exporter} that writes mutations
* to Accumulo. For an overview of how to use this, see the project level documentation for
* exporting to Accumulo.
*
* @since 1.0.0
* @deprecated since 1.1.0, replaced by
* {@link org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter} and
* {@link AccumuloTranslator}
*/
@Deprecated
public abstract class AccumuloExporter<K, V>
    extends org.apache.fluo.recipes.core.export.Exporter<K, V> {

  /**
   * Use this to configure the Accumulo table where an AccumuloExporter's mutations will be written.
   * Create and pass to
   * {@link org.apache.fluo.recipes.core.export.ExportQueue.Options#setExporterConfiguration(SimpleConfiguration)}
   *
   * @since 1.0.0
   */
  public static class Configuration extends SimpleConfiguration {

    private static final long serialVersionUID = 1L;

    public Configuration(String instanceName, String zookeepers, String user, String password,
        String table) {
      super.setProperty("instanceName", instanceName);
      super.setProperty("zookeepers", zookeepers);
      super.setProperty("user", user);
      super.setProperty("password", password);
      super.setProperty("table", table);
    }
  }

  // delegate that actually writes mutations; built from the exporter configuration in init()
  private org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter<K, V> accumuloWriter;

  @Override
  public void init(org.apache.fluo.recipes.core.export.Exporter.Context context) throws Exception {
    SimpleConfiguration cfg = context.getExporterConfiguration();
    // bridge the legacy flat properties into the new fluent configuration under a fixed id
    FluoConfiguration scratch = new FluoConfiguration();
    org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter.configure("aecfgid")
        .instance(cfg.getString("instanceName"), cfg.getString("zookeepers"))
        .credentials(cfg.getString("user"), cfg.getString("password"))
        .table(cfg.getString("table")).save(scratch);
    accumuloWriter = new org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter<K, V>(
        "aecfgid", scratch.getAppConfiguration(), this::translate);
  }

  @Override
  protected void processExports(Iterator<SequencedExport<K, V>> exports) {
    accumuloWriter.export(exports);
  }

  /**
   * Implementations of this method should translate the given SequencedExport to 0 or more
   * Mutations.
   *
   * @param export the input that should be translated to mutations
   * @param consumer output mutations to this consumer
   */
  protected abstract void translate(SequencedExport<K, V> export, Consumer<Mutation> consumer);

  /**
   * Generates Accumulo mutations by comparing the differences between a RowColumn/Bytes map that is
   * generated for old and new data and represents how the data should exist in Accumulo. When
   * comparing each row/column/value (RCV) of old and new data, mutations are generated using the
   * following rules:
   * <ul>
   * <li>If old and new data have the same RCV, nothing is done.
   * <li>If old and new data have same row/column but different values, an update mutation is
   * created for the row/column.
   * <li>If old data has a row/column that is not in the new data, a delete mutation is generated.
   * <li>If new data has a row/column that is not in the old data, an insert mutation is generated.
   * <li>Only one mutation is generated per row.
   * <li>The export sequence number is used for the timestamp in the mutation.
   * </ul>
   *
   * @param consumer generated mutations will be output to this consumer
   * @param oldData Map containing old row/column data
   * @param newData Map containing new row/column data
   * @param seq Export sequence number
   * @deprecated since 1.1.0 use
   *             {@link AccumuloTranslator#generateMutations(long, Map, Map, Consumer)}
   */
  @Deprecated
  public static void generateMutations(long seq, Map<RowColumn, Bytes> oldData,
      Map<RowColumn, Bytes> newData, Consumer<Mutation> consumer) {
    AccumuloTranslator.generateMutations(seq, oldData, newData, consumer);
  }
}
| 6,024 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export/function/AccumuloTranslator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.export.function;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
import org.apache.accumulo.core.data.Mutation;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.api.data.RowColumn;
import org.apache.fluo.recipes.core.export.SequencedExport;
/**
* This interface is used by {@link AccumuloExporter} to translated exports into Accumulo mutations.
*
* @see AccumuloExporter
* @since 1.1.0
*/
@FunctionalInterface
public interface AccumuloTranslator<K, V> {

  /**
   * This function should convert the export to zero or more mutations, passing the mutations to the
   * consumer.
   */
  void translate(SequencedExport<K, V> export, Consumer<Mutation> mutationWriter);

  /**
   * Generates Accumulo mutations by comparing the differences between a RowColumn/Bytes map that is
   * generated for old and new data and represents how the data should exist in Accumulo. When
   * comparing each row/column/value (RCV) of old and new data, mutations are generated using the
   * following rules:
   * <ul>
   * <li>If old and new data have the same RCV, nothing is done.
   * <li>If old and new data have same row/column but different values, an update mutation is
   * created for the row/column.
   * <li>If old data has a row/column that is not in the new data, a delete mutation is generated.
   * <li>If new data has a row/column that is not in the old data, an insert mutation is generated.
   * <li>Only one mutation is generated per row.
   * <li>The export sequence number is used for the timestamp in the mutation.
   * </ul>
   *
   * @param consumer generated mutations will be output to this consumer
   * @param oldData Map containing old row/column data
   * @param newData Map containing new row/column data
   * @param seq Export sequence number
   */
  public static void generateMutations(long seq, Map<RowColumn, Bytes> oldData,
      Map<RowColumn, Bytes> newData, Consumer<Mutation> consumer) {
    Map<Bytes, Mutation> byRow = new HashMap<>();
    // deletes: row/columns present in the old data but absent from the new data
    for (RowColumn rc : oldData.keySet()) {
      if (!newData.containsKey(rc)) {
        Mutation m = byRow.computeIfAbsent(rc.getRow(), row -> new Mutation(row.toArray()));
        m.putDelete(rc.getColumn().getFamily().toArray(), rc.getColumn().getQualifier().toArray(),
            seq);
      }
    }
    // inserts/updates: row/columns that are new or whose value changed
    newData.forEach((rc, newVal) -> {
      Bytes prevVal = oldData.get(rc);
      if (prevVal == null || !prevVal.equals(newVal)) {
        Column col = rc.getColumn();
        byRow.computeIfAbsent(rc.getRow(), row -> new Mutation(row.toArray()))
            .put(col.getFamily().toArray(), col.getQualifier().toArray(), seq, newVal.toArray());
      }
    });
    byRow.values().forEach(consumer);
  }
}
| 6,025 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export/function/AeFluentConfigurator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.export.function;
import java.util.Objects;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter.CredentialArgs;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter.InstanceArgs;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter.Options;
import org.apache.fluo.recipes.accumulo.export.function.AccumuloExporter.TableArgs;
// Intentionally package private
class AeFluentConfigurator implements InstanceArgs, CredentialArgs, TableArgs, Options {

  // all settings are stored under "recipes.accumulo.writer.<id>.<name>"
  private static final String PREFIX = "recipes.accumulo.writer.";

  private final String id;
  private String instance;
  private String zookeepers;
  private String user;
  private String password;
  private String table;

  AeFluentConfigurator(String id) {
    this.id = id;
  }

  /**
   * Persists the collected connection settings into the application configuration of the given
   * Fluo configuration, keyed by this configurator's id.
   */
  @Override
  public void save(FluoConfiguration fluoConf) {
    SimpleConfiguration appConfig = fluoConf.getAppConfiguration();
    appConfig.setProperty(PREFIX + id + ".instance", instance);
    appConfig.setProperty(PREFIX + id + ".zookeepers", zookeepers);
    appConfig.setProperty(PREFIX + id + ".user", user);
    appConfig.setProperty(PREFIX + id + ".password", password);
    appConfig.setProperty(PREFIX + id + ".table", table);
  }

  @Override
  public Options table(String tableName) {
    this.table = Objects.requireNonNull(tableName);
    return this;
  }

  @Override
  public TableArgs credentials(String user, String password) {
    this.user = Objects.requireNonNull(user);
    this.password = Objects.requireNonNull(password);
    return this;
  }

  @Override
  public CredentialArgs instance(String instanceName, String zookeepers) {
    this.instance = Objects.requireNonNull(instanceName);
    this.zookeepers = Objects.requireNonNull(zookeepers);
    return this;
  }

  String getInstance() {
    return instance;
  }

  String getZookeepers() {
    return zookeepers;
  }

  String getUser() {
    return user;
  }

  String getPassword() {
    return password;
  }

  String getTable() {
    return table;
  }

  /**
   * Reads back the settings previously written by {@link #save(FluoConfiguration)} for the given
   * id. Missing properties surface as whatever {@code SimpleConfiguration.getString} does for an
   * absent key.
   */
  public static AeFluentConfigurator load(String id, SimpleConfiguration config) {
    AeFluentConfigurator aefc = new AeFluentConfigurator(id);
    aefc.instance = config.getString(PREFIX + id + ".instance");
    aefc.zookeepers = config.getString(PREFIX + id + ".zookeepers");
    aefc.user = config.getString(PREFIX + id + ".user");
    aefc.password = config.getString(PREFIX + id + ".password");
    aefc.table = config.getString(PREFIX + id + ".table");
    return aefc;
  }
}
| 6,026 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export/function/AccumuloExporter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.export.function;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.function.Consumer;
import org.apache.accumulo.core.data.Mutation;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.api.observer.ObserverProvider;
import org.apache.fluo.recipes.core.export.ExportQueue;
import org.apache.fluo.recipes.core.export.SequencedExport;
import org.apache.fluo.recipes.core.export.function.Exporter;
/**
* An Accumulo-specific {@link Exporter} that writes mutations to Accumulo. For an overview of how
* to use this, see the project level documentation for exporting to Accumulo.
*
* @see ExportQueue#registerObserver(ObserverProvider.Registry, Exporter)
* @since 1.1.0
*/
public class AccumuloExporter<K, V> implements Exporter<K, V> {

  private final AccumuloTranslator<K, V> translator;
  private final AccumuloWriter writer;

  /**
   * Part of a fluent configuration API.
   *
   * @since 1.1.0
   */
  public static interface InstanceArgs {
    CredentialArgs instance(String instanceName, String zookeepers);
  }

  /**
   * Part of a fluent configuration API.
   *
   * @since 1.1.0
   */
  public static interface CredentialArgs {
    TableArgs credentials(String user, String password);
  }

  /**
   * Part of a fluent configuration API.
   *
   * @since 1.1.0
   */
  public static interface TableArgs {
    Options table(String tableName);
  }

  /**
   * Part of a fluent configuration API.
   *
   * @since 1.1.0
   */
  public static interface Options {
    void save(FluoConfiguration fluoConf);
  }

  /** Entry point of the fluent API for saving connection settings under the given id. */
  public static InstanceArgs configure(String configId) {
    return new AeFluentConfigurator(configId);
  }

  public AccumuloExporter(String configId, SimpleConfiguration appConfig,
      AccumuloTranslator<K, V> translator) {
    // load the connection settings saved earlier under configId and obtain the shared writer
    AeFluentConfigurator cfg = AeFluentConfigurator.load(configId, appConfig);
    this.writer = AccumuloWriter.getInstance(cfg.getInstance(), cfg.getZookeepers(), cfg.getUser(),
        cfg.getPassword(), cfg.getTable());
    this.translator = translator;
  }

  @Override
  public void export(Iterator<SequencedExport<K, V>> t) {
    // translate every export into mutations, then hand them to the writer in one batch
    List<Mutation> batch = new ArrayList<>();
    t.forEachRemaining(exp -> translator.translate(exp, batch::add));
    if (!batch.isEmpty()) {
      writer.write(batch);
    }
  }
}
| 6,027 |
0 | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export | Create_ds/fluo-recipes/modules/accumulo/src/main/java/org/apache/fluo/recipes/accumulo/export/function/AccumuloWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.accumulo.export.function;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.MutationsRejectedException;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.data.Mutation;
/**
* Writes mutations to Accumulo using a shared batch writer
*
* @since 1.0.0
*/
// intentionally package private
class AccumuloWriter {

  /** A batch of mutations plus a latch the submitting thread awaits until the batch is flushed. */
  private static class Mutations {
    final List<Mutation> mutations;
    final CountDownLatch cdl = new CountDownLatch(1);

    Mutations(Collection<Mutation> mutations) {
      this.mutations = new ArrayList<>(mutations);
    }
  }

  /**
   * Background task that drains batches from one writer's queue, writes them with a single
   * BatchWriter, and releases the latch of every batch covered by each flush.
   */
  private static class ExportTask implements Runnable {

    private BatchWriter bw;
    private final LinkedBlockingQueue<Mutations> queue;

    ExportTask(String instanceName, String zookeepers, String user, String password, String table,
        LinkedBlockingQueue<Mutations> queue)
        throws TableNotFoundException, AccumuloException, AccumuloSecurityException {
      this.queue = queue;
      AccumuloClient client =
          Accumulo.newClient().to(instanceName, zookeepers).as(user, password).build();
      // TODO need to close batch writer
      try {
        bw = client.createBatchWriter(table, new BatchWriterConfig());
      } catch (TableNotFoundException tnfe) {
        // table did not exist yet; create it (tolerating a concurrent creator) and retry
        try {
          client.tableOperations().create(table);
        } catch (TableExistsException e) {
          // nothing to do
        }
        bw = client.createBatchWriter(table, new BatchWriterConfig());
      }
    }

    @Override
    public void run() {
      ArrayList<Mutations> exports = new ArrayList<>();
      while (true) {
        try {
          exports.clear();
          // gather export from all threads that have placed an item on the queue
          exports.add(queue.take());
          queue.drainTo(exports);
          for (Mutations ml : exports) {
            bw.addMutations(ml.mutations);
          }
          bw.flush();
          // notify all threads waiting after flushing
          for (Mutations ml : exports) {
            ml.cdl.countDown();
          }
        } catch (InterruptedException | MutationsRejectedException e) {
          throw new RuntimeException(e);
        }
      }
    }
  }

  // One queue per writer instance. This was previously a static field that every constructor
  // overwrote, so creating a second writer (a new key in getInstance) orphaned the queue the
  // first writer's background thread was draining, leaving callers of write() blocked forever.
  // With the fix, the findbugs max rank override in pom.xml may no longer be needed.
  private final LinkedBlockingQueue<Mutations> exportQueue = new LinkedBlockingQueue<>(10000);

  private AccumuloWriter(String instanceName, String zookeepers, String user, String password,
      String table) {
    try {
      Thread queueProcessingTask =
          new Thread(new ExportTask(instanceName, zookeepers, user, password, table, exportQueue));
      queueProcessingTask.setDaemon(true);
      queueProcessingTask.start();
    } catch (Exception e) {
      throw new IllegalStateException(e);
    }
  }

  // cache of writers keyed by connection info so equivalent configurations share one BatchWriter
  private static final Map<String, AccumuloWriter> exporters = new HashMap<>();

  static synchronized AccumuloWriter getInstance(String instanceName, String zookeepers,
      String user, String password, String table) {
    String key =
        instanceName + ":" + zookeepers + ":" + user + ":" + password.hashCode() + ":" + table;
    return exporters.computeIfAbsent(key,
        k -> new AccumuloWriter(instanceName, zookeepers, user, password, table));
  }

  /**
   * Enqueues the mutations for the background writer and blocks until they have been flushed to
   * Accumulo.
   */
  void write(Collection<Mutation> mutations) {
    Mutations work = new Mutations(mutations);
    exportQueue.add(work);
    try {
      work.cdl.await();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // preserve interrupt status for callers
      throw new RuntimeException(e);
    }
  }
}
| 6,028 |
0 | Create_ds/fluo-recipes/modules/kryo/src/test/java/org/apache/fluo/recipes | Create_ds/fluo-recipes/modules/kryo/src/test/java/org/apache/fluo/recipes/kryo/KryoSimpleSerializerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.kryo;
import com.esotericsoftware.kryo.pool.KryoFactory;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.fluo.recipes.core.serialization.SimpleSerializer;
import org.apache.fluo.recipes.kryo.KryoSimplerSerializer;
import org.junit.Assert;
import org.junit.Test;
public class KryoSimpleSerializerTest {

  private static final KryoFactory KRYO_FACTORY = new KryoSimplerSerializer.DefaultFactory();

  // This method was missing @Test, so JUnit silently never ran the Column round-trip check.
  @Test
  public void testColumn() {
    SimpleSerializer serializer = new KryoSimplerSerializer(KRYO_FACTORY);
    Column before = new Column("a", "b");
    byte[] barray = serializer.serialize(before);
    Column after = serializer.deserialize(barray, Column.class);
    Assert.assertEquals(before, after);
  }

  @Test
  public void testBytes() {
    SimpleSerializer serializer = new KryoSimplerSerializer(KRYO_FACTORY);
    Bytes before = Bytes.of("test");
    byte[] barray = serializer.serialize(before);
    Bytes after = serializer.deserialize(barray, Bytes.class);
    Assert.assertEquals(before, after);
  }
}
| 6,029 |
0 | Create_ds/fluo-recipes/modules/kryo/src/main/java/org/apache/fluo/recipes | Create_ds/fluo-recipes/modules/kryo/src/main/java/org/apache/fluo/recipes/kryo/KryoSimplerSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.kryo;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Serializable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.pool.KryoCallback;
import com.esotericsoftware.kryo.pool.KryoFactory;
import com.esotericsoftware.kryo.pool.KryoPool;
import com.google.common.base.Preconditions;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.SimpleConfiguration;
import org.apache.fluo.recipes.core.serialization.SimpleSerializer;
/***
* @since 1.0.0
*/
public class KryoSimplerSerializer implements SimpleSerializer, Serializable {

  private static final long serialVersionUID = 1L;

  private static final String KRYO_FACTORY_PROP = "recipes.serializer.kryo.factory";

  // One pool per factory class name, shared across serializer instances.
  // Kryo objects are pooled because they are expensive to create.
  private static Map<String, KryoPool> pools = new ConcurrentHashMap<>();

  private String factoryType = null;
  // Not serializable; when this serializer is deserialized the factory is
  // re-created from factoryType on first use.
  private transient KryoFactory factory = null;

  /** Instantiates the configured KryoFactory implementation by class name. */
  private static KryoFactory getFactory(String factoryType) {
    try {
      // getDeclaredConstructor().newInstance() replaces the deprecated
      // Class.newInstance(), which propagated undeclared checked exceptions.
      return KryoSimplerSerializer.class.getClassLoader().loadClass(factoryType)
          .asSubclass(KryoFactory.class).getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
      throw new RuntimeException(e);
    }
  }

  /** Returns (creating if needed) the shared pool for this serializer's factory. */
  private KryoPool getPool() {
    Preconditions.checkState(factory != null || factoryType != null,
        "KryoFactory not initialized");
    if (factory == null) {
      return pools.computeIfAbsent(factoryType,
          ft -> new KryoPool.Builder(getFactory(ft)).softReferences().build());
    } else {
      return pools.computeIfAbsent(factory.getClass().getName(),
          ft -> new KryoPool.Builder(factory).softReferences().build());
    }
  }

  /**
   * Factory that creates Kryo instances with default settings.
   *
   * @since 1.0.0
   */
  public static class DefaultFactory implements KryoFactory {
    @Override
    public Kryo create() {
      Kryo kryo = new Kryo();
      return kryo;
    }
  }

  @Override
  public <T> byte[] serialize(T obj) {
    return getPool().run(kryo -> {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      // try-with-resources closes (and flushes) the Output before the buffer is read
      try (Output output = new Output(baos)) {
        kryo.writeClassAndObject(output, obj);
      }
      return baos.toByteArray();
    });
  }

  @Override
  public <T> T deserialize(byte[] serObj, Class<T> clazz) {
    return getPool().run(kryo -> {
      try (Input input = new Input(new ByteArrayInputStream(serObj))) {
        return clazz.cast(kryo.readClassAndObject(input));
      }
    });
  }

  @Override
  public void init(SimpleConfiguration appConfig) {
    Preconditions.checkArgument(factory == null && factoryType == null, "Already initialized");
    factoryType = appConfig.getString(KRYO_FACTORY_PROP, DefaultFactory.class.getName());
  }

  public KryoSimplerSerializer() {}

  /**
   * Can call this method to create a serializer w/o calling {@link #init(SimpleConfiguration)}
   */
  public KryoSimplerSerializer(KryoFactory factory) {
    factoryType = factory.getClass().getName();
    this.factory = factory;
  }

  /**
   * Call this to configure a KryoFactory type before initializing Fluo.
   */
  public static void setKryoFactory(FluoConfiguration config, String factoryType) {
    config.getAppConfiguration().setProperty(KRYO_FACTORY_PROP, factoryType);
  }

  /**
   * Call this to configure a KryoFactory type before initializing Fluo.
   */
  public static void setKryoFactory(FluoConfiguration config,
      Class<? extends KryoFactory> factoryType) {
    config.getAppConfiguration().setProperty(KRYO_FACTORY_PROP, factoryType.getName());
  }
}
| 6,030 |
0 | Create_ds/fluo-recipes/modules/spark/src/test/java/org/apache/fluo/recipes/spark | Create_ds/fluo-recipes/modules/spark/src/test/java/org/apache/fluo/recipes/spark/it/FluoSparkHelperIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.spark.it;
import java.util.List;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.data.RowColumnValue;
import org.apache.fluo.api.mini.MiniFluo;
import org.apache.fluo.recipes.spark.FluoSparkHelper;
import org.apache.fluo.recipes.spark.FluoSparkTestUtil;
import org.apache.fluo.recipes.test.AccumuloExportITBase;
import org.apache.fluo.recipes.test.FluoITHelper;
import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
public class FluoSparkHelperIT extends AccumuloExportITBase {

  static JavaSparkContext ctx;

  public FluoSparkHelperIT() {
    // false: presumably suppresses automatic MiniFluo startup so each test can
    // manage the Fluo lifecycle itself — TODO confirm against AccumuloExportITBase
    super(false);
  }

  @BeforeClass
  public static void setupIT() {
    ctx = FluoSparkTestUtil.newSparkContext("fluo-spark-helper");
  }

  @AfterClass
  public static void teardownIT() {
    ctx.stop();
  }

  // Three rows of test data in row|family|qualifier|value form.
  private List<RowColumnValue> getData() {
    return FluoITHelper.parse("arow|acf|acq|aval", "brow|bcf|bcq|bval", "crow|ccf|ccq|cval");
  }

  @Test
  public void testAccumuloBulkImport() throws Exception {
    FluoSparkHelper helper =
        new FluoSparkHelper(getFluoConfiguration(), ctx.hadoopConfiguration(), new Path("/tmp/"));
    List<RowColumnValue> data = getData();
    final String accumuloTable = "table1";
    getAccumuloConnector().tableOperations().create(accumuloTable);
    // Bulk import the data, then verify it landed in the Accumulo table.
    helper.bulkImportRcvToAccumulo(FluoSparkHelper.toPairRDD(ctx.parallelize(data)),
        accumuloTable, new FluoSparkHelper.BulkImportOptions());
    Assert.assertTrue(
        FluoITHelper.verifyAccumuloTable(getAccumuloConnector(), accumuloTable, data));
  }

  @Test
  public void testFluoBulkImport() throws Exception {
    FluoSparkHelper helper =
        new FluoSparkHelper(getFluoConfiguration(), ctx.hadoopConfiguration(), new Path("/tmp/"));
    List<RowColumnValue> data = getData();
    // Bulk import into the Fluo table, then verify both through a MiniFluo
    // snapshot and by reading the table back through Spark.
    helper.bulkImportRcvToFluo(FluoSparkHelper.toPairRDD(ctx.parallelize(data)),
        new FluoSparkHelper.BulkImportOptions());
    try (MiniFluo miniFluo = FluoFactory.newMiniFluo(getFluoConfiguration())) {
      Assert.assertNotNull(miniFluo);
      Assert.assertTrue(FluoITHelper.verifyFluoTable(getFluoConfiguration(), data));
      List<RowColumnValue> readBack = FluoSparkHelper.toRcvRDD(helper.readFromFluo(ctx)).collect();
      Assert.assertTrue(FluoITHelper.verifyRowColumnValues(data, readBack));
    }
  }
}
| 6,031 |
0 | Create_ds/fluo-recipes/modules/spark/src/main/java/org/apache/fluo/recipes | Create_ds/fluo-recipes/modules/spark/src/main/java/org/apache/fluo/recipes/spark/FluoSparkTestUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.spark;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
/**
* Utility code for Fluo/Spark testing
*
* @since 1.0.0
*/
public class FluoSparkTestUtil {

  /**
   * Creates Java Spark Context for unit/integration testing
   *
   * @param testName Name of test being run
   * @return JavaSparkContext
   */
  public static JavaSparkContext newSparkContext(String testName) {
    // Local in-process master; app name, app id, and UI port are fixed from testName.
    SparkConf conf = new SparkConf().setMaster("local").setAppName(testName);
    conf.set("spark.app.id", testName);
    conf.set("spark.ui.port", "4444");
    return new JavaSparkContext(conf);
  }
}
| 6,032 |
0 | Create_ds/fluo-recipes/modules/spark/src/main/java/org/apache/fluo/recipes | Create_ds/fluo-recipes/modules/spark/src/main/java/org/apache/fluo/recipes/spark/FluoSparkHelper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.spark;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.RowColumn;
import org.apache.fluo.api.data.RowColumnValue;
import org.apache.fluo.mapreduce.FluoEntryInputFormat;
import org.apache.fluo.mapreduce.FluoKeyValue;
import org.apache.fluo.mapreduce.FluoKeyValueGenerator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.Partitioner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;
/**
* Helper methods for using Fluo with Spark
*
* @since 1.0.0
*/
public class FluoSparkHelper {

  private static final Logger log = LoggerFactory.getLogger(FluoSparkHelper.class);

  // Counter used to generate unique temp dir names under tempBaseDir.
  private static AtomicInteger tempDirCounter = new AtomicInteger(0);

  private FluoConfiguration fluoConfig;
  private Configuration hadoopConfig;
  private Path tempBaseDir;
  private FileSystem hdfs;

  /**
   * @param fluoConfig Fluo configuration; also supplies Accumulo connection properties
   * @param hadoopConfig Hadoop configuration used to obtain an HDFS client
   * @param tempBaseDir base HDFS directory under which bulk import temp dirs are created
   */
  public FluoSparkHelper(FluoConfiguration fluoConfig, Configuration hadoopConfig,
      Path tempBaseDir) {
    // NOTE: the original constructor assigned fluoConfig twice; the redundant
    // second assignment was removed.
    this.fluoConfig = fluoConfig;
    this.hadoopConfig = hadoopConfig;
    this.tempBaseDir = tempBaseDir;
    try {
      hdfs = FileSystem.get(hadoopConfig);
    } catch (IOException e) {
      throw new IllegalStateException("Unable to get HDFS client from hadoop config", e);
    }
  }

  /**
   * Converts RowColumnValue RDD to RowColumn/Bytes PairRDD
   *
   * @param rcvRDD RowColumnValue RDD to convert
   * @return RowColumn/Bytes PairRDD
   */
  public static JavaPairRDD<RowColumn, Bytes> toPairRDD(JavaRDD<RowColumnValue> rcvRDD) {
    return rcvRDD.mapToPair(rcv -> new Tuple2<>(rcv.getRowColumn(), rcv.getValue()));
  }

  /**
   * Converts RowColumn/Bytes PairRDD to RowColumnValue RDD
   *
   * @param pairRDD RowColumn/Bytes PairRDD
   * @return RowColumnValue RDD
   */
  public static JavaRDD<RowColumnValue> toRcvRDD(JavaPairRDD<RowColumn, Bytes> pairRDD) {
    return pairRDD.map(t -> new RowColumnValue(t._1().getRow(), t._1().getColumn(), t._2()));
  }

  /** Builds an AccumuloClient from the Accumulo properties in the Fluo configuration. */
  private static AccumuloClient getAccumuloClient(FluoConfiguration config) {
    return Accumulo.newClient().to(config.getAccumuloInstance(), config.getAccumuloZookeepers())
        .as(config.getAccumuloUser(), config.getAccumuloPassword()).build();
  }

  /**
   * Reads all data from a snapshot in Fluo and returns it as a RowColumn/Value RDD.
   *
   * @param ctx Java Spark context
   * @return RowColumn/Value RDD containing all data in Fluo
   */
  public JavaPairRDD<RowColumn, Bytes> readFromFluo(JavaSparkContext ctx) {
    Job job;
    try {
      job = Job.getInstance(hadoopConfig);
    } catch (IOException e) {
      throw new IllegalStateException(e);
    }
    FluoEntryInputFormat.configure(job, fluoConfig);
    return ctx.newAPIHadoopRDD(job.getConfiguration(), FluoEntryInputFormat.class, RowColumn.class,
        Bytes.class);
  }

  /**
   * Bulk import RowColumn/Value data into Fluo table (obtained from Fluo configuration). This
   * method will repartition RDD using the current split points of the Fluo table, creating one
   * partition per tablet in the table. This is done so that one RFile is created per tablet for
   * bulk import.
   *
   * @param data RowColumn/Value data to import
   * @param opts Bulk import options
   */
  public void bulkImportRcvToFluo(JavaPairRDD<RowColumn, Bytes> data, BulkImportOptions opts) {
    data = partitionForAccumulo(data, fluoConfig.getAccumuloTable(), opts);
    // Each RowColumn/value expands to multiple Accumulo key/values (Fluo's on-disk format).
    JavaPairRDD<Key, Value> kvData = data.flatMapToPair(tuple -> {
      List<Tuple2<Key, Value>> output = new LinkedList<>();
      RowColumn rc = tuple._1();
      FluoKeyValueGenerator fkvg = new FluoKeyValueGenerator();
      fkvg.setRow(rc.getRow()).setColumn(rc.getColumn()).setValue(tuple._2().toArray());
      for (FluoKeyValue kv : fkvg.getKeyValues()) {
        output.add(new Tuple2<>(kv.getKey(), kv.getValue()));
      }
      return output.iterator();
    });
    bulkImportKvToAccumulo(kvData, fluoConfig.getAccumuloTable(), opts);
  }

  /**
   * Bulk import Key/Value data into into Fluo table (obtained from Fluo configuration). This method
   * does not repartition data. One RFile will be created for each partition in the passed in RDD.
   * Ensure the RDD is reasonably partitioned before calling this method.
   *
   * @param data Key/Value data to import
   * @param opts Bulk import options
   */
  public void bulkImportKvToFluo(JavaPairRDD<Key, Value> data, BulkImportOptions opts) {
    bulkImportKvToAccumulo(data, fluoConfig.getAccumuloTable(), opts);
  }

  /**
   * Bulk import RowColumn/Value data into specified Accumulo table. This method will repartition
   * RDD using the current split points of the specified table, creating one partition per tablet in
   * the table. This is done so that one RFile is created per tablet for bulk import.
   *
   * @param data RowColumn/Value data to import
   * @param accumuloTable Accumulo table used for import
   * @param opts Bulk import options
   */
  public void bulkImportRcvToAccumulo(JavaPairRDD<RowColumn, Bytes> data, String accumuloTable,
      BulkImportOptions opts) {
    data = partitionForAccumulo(data, accumuloTable, opts);
    JavaPairRDD<Key, Value> kvData = data.mapToPair(tuple -> {
      RowColumn rc = tuple._1();
      byte[] row = rc.getRow().toArray();
      byte[] cf = rc.getColumn().getFamily().toArray();
      byte[] cq = rc.getColumn().getQualifier().toArray();
      byte[] val = tuple._2().toArray();
      return new Tuple2<>(new Key(new Text(row), new Text(cf), new Text(cq), 0), new Value(val));
    });
    bulkImportKvToAccumulo(kvData, accumuloTable, opts);
  }

  /**
   * Bulk import Key/Value data into specified Accumulo table. This method does not repartition
   * data. One RFile will be created for each partition in the passed in RDD. Ensure the RDD is
   * reasonably partitioned before calling this method.
   *
   * @param data Key/value data to import
   * @param accumuloTable Accumulo table used for import
   * @param opts Bulk import options
   */
  public void bulkImportKvToAccumulo(JavaPairRDD<Key, Value> data, String accumuloTable,
      BulkImportOptions opts) {
    Path tempDir = getTempDir(opts);
    try (AccumuloClient client = getAccumuloClient(fluoConfig)) {
      if (hdfs.exists(tempDir)) {
        throw new IllegalArgumentException("HDFS temp dir already exists: " + tempDir.toString());
      }
      hdfs.mkdirs(tempDir);
      Path dataDir = new Path(tempDir.toString() + "/data");
      Path failDir = new Path(tempDir.toString() + "/fail");
      hdfs.mkdirs(failDir);

      // save data to HDFS
      Job job = Job.getInstance(hadoopConfig);
      AccumuloFileOutputFormat.setOutputPath(job, dataDir);
      // must use new API here as saveAsHadoopFile throws exception
      data.saveAsNewAPIHadoopFile(dataDir.toString(), Key.class, Value.class,
          AccumuloFileOutputFormat.class, job.getConfiguration());

      // bulk import data to Accumulo
      log.info("Wrote data for bulk import to HDFS temp directory: {}", dataDir);
      Connector conn = chooseConnector(client, opts);
      conn.tableOperations().importDirectory(accumuloTable, dataDir.toString(), failDir.toString(),
          false);

      // throw exception if failures directory contains files
      if (hdfs.listFiles(failDir, true).hasNext()) {
        throw new IllegalStateException("Bulk import failed! Found files that failed to import "
            + "in failures directory: " + failDir);
      }
      log.info("Successfully bulk imported data in {} to '{}' Accumulo table", dataDir,
          accumuloTable);

      // delete data directory
      hdfs.delete(tempDir, true);
      log.info("Deleted HDFS temp directory created for bulk import: {}", tempDir);
    } catch (IOException | TableNotFoundException | AccumuloException
        | AccumuloSecurityException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Optional settings for Bulk Imports
   *
   * @since 1.0.0
   */
  public static class BulkImportOptions {

    public static BulkImportOptions DEFAULT = new BulkImportOptions();

    Connector conn = null;
    Path tempDir = null;

    public BulkImportOptions() {}

    /**
     * If this methods is not called, then a Connector will be created using properties in the
     * FluoConfiguration supplied to
     * {@link FluoSparkHelper#FluoSparkHelper(FluoConfiguration, Configuration, Path)}
     *
     * @param conn Use this connector to bulk import files into Accumulo.
     * @return this
     * @deprecated use {@link #setAccumuloClient(AccumuloClient)}
     */
    @Deprecated(since = "1.3.0", forRemoval = true)
    public BulkImportOptions setAccumuloConnector(Connector conn) {
      Objects.requireNonNull(conn);
      this.conn = conn;
      return this;
    }

    /**
     * If this methods is not called, then a Client will be created using properties in the
     * FluoConfiguration supplied to
     * {@link FluoSparkHelper#FluoSparkHelper(FluoConfiguration, Configuration, Path)}
     *
     * @param client Use this client to bulk import files into Accumulo.
     * @return this
     *
     * @since 1.3.0
     */
    public BulkImportOptions setAccumuloClient(AccumuloClient client) {
      Objects.requireNonNull(client);
      try {
        this.conn = Connector.from(client);
      } catch (AccumuloSecurityException | AccumuloException e) {
        throw new RuntimeException(e);
      }
      return this;
    }

    /**
     * If this method is not called, then a temp dir will be created based on the path passed
     * supplied to {@link FluoSparkHelper#FluoSparkHelper(FluoConfiguration, Configuration, Path)}
     *
     * @param tempDir Use this directory to store RFiles generated for bulk import.
     * @return this
     */
    public BulkImportOptions setTempDir(Path tempDir) {
      Objects.requireNonNull(tempDir);
      this.tempDir = tempDir;
      return this;
    }
  }

  /** Returns a candidate temp dir name; caller must check it does not already exist. */
  private Path getPossibleTempDir() {
    return new Path(tempBaseDir.toString() + "/" + tempDirCounter.getAndIncrement());
  }

  /** Resolves the temp dir: explicit option if set, else a fresh dir under tempBaseDir. */
  private Path getTempDir(BulkImportOptions opts) {
    Path tempDir;
    if (opts.tempDir == null) {
      try {
        tempDir = getPossibleTempDir();
        while (hdfs.exists(tempDir)) {
          tempDir = getPossibleTempDir();
        }
      } catch (IOException e) {
        throw new IllegalStateException(e);
      }
    } else {
      tempDir = opts.tempDir;
    }
    return tempDir;
  }

  /** Repartitions and sorts data so one output file is created per Accumulo tablet. */
  private JavaPairRDD<RowColumn, Bytes> partitionForAccumulo(JavaPairRDD<RowColumn, Bytes> data,
      String accumuloTable, BulkImportOptions opts) {
    Partitioner accumuloPartitioner;
    try (AccumuloClient client = getAccumuloClient(fluoConfig)) {
      accumuloPartitioner = new AccumuloRangePartitioner(
          chooseConnector(client, opts).tableOperations().listSplits(accumuloTable));
    } catch (TableNotFoundException | AccumuloSecurityException | AccumuloException e) {
      throw new IllegalStateException(e);
    }
    return data.repartitionAndSortWithinPartitions(accumuloPartitioner);
  }

  /** Prefers the Connector supplied via options; otherwise wraps the given client. */
  private Connector chooseConnector(AccumuloClient client, BulkImportOptions opts) {
    try {
      return opts.conn == null ? Connector.from(client) : opts.conn;
    } catch (AccumuloSecurityException | AccumuloException e) {
      throw new RuntimeException(e);
    }
  }
}
| 6,033 |
0 | Create_ds/fluo-recipes/modules/spark/src/main/java/org/apache/fluo/recipes | Create_ds/fluo-recipes/modules/spark/src/main/java/org/apache/fluo/recipes/spark/AccumuloRangePartitioner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.spark;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.RowColumn;
import org.apache.hadoop.io.Text;
import org.apache.spark.Partitioner;
/**
* @since 1.0.0
*/
public class AccumuloRangePartitioner extends Partitioner {

  private static final long serialVersionUID = 1L;

  // Table split points, in the order provided by Accumulo.
  private List<Bytes> splits;

  public AccumuloRangePartitioner(Collection<Text> listSplits) {
    splits = new ArrayList<>(listSplits.size());
    for (Text split : listSplits) {
      splits.add(Bytes.of(split.getBytes(), 0, split.getLength()));
    }
  }

  @Override
  public int getPartition(Object o) {
    Bytes row = ((RowColumn) o).getRow();
    int pos = Collections.binarySearch(splits, row);
    if (pos < 0) {
      // Not an exact split value: binarySearch returns -(insertionPoint) - 1.
      pos = -(pos + 1);
    }
    return pos;
  }

  @Override
  public int numPartitions() {
    // One partition per range between consecutive splits, plus one after the last split.
    return splits.size() + 1;
  }
}
| 6,034 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-driver/src/main/java/org/apache/asterix | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-driver/src/main/java/org/apache/asterix/jdbc/Driver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.asterix.jdbc.core.ADBDriverBase;
import org.apache.asterix.jdbc.core.ADBDriverContext;
import org.apache.asterix.jdbc.core.ADBDriverProperty;
import org.apache.asterix.jdbc.core.ADBErrorReporter;
import org.apache.asterix.jdbc.core.ADBProductVersion;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
public class Driver extends ADBDriverBase implements java.sql.Driver {

    private static final String DRIVER_SCHEME = "asterixdb:";
    private static final int DEFAULT_API_PORT = 19002;

    static {
        // Configure logging first, then register this driver with DriverManager.
        setupLogging(Driver.class);
        registerDriver(new Driver());
    }

    public Driver() {
        super(DRIVER_SCHEME, DEFAULT_API_PORT);
    }

    @Override
    protected ADBProtocol createProtocol(String host, int port, Map<ADBDriverProperty, Object> properties,
            ADBDriverContext driverContext) throws SQLException {
        // DriverManager's login timeout (seconds) bounds the initial connection attempt.
        return new ADBProtocol(host, port, properties, driverContext, DriverManager.getLoginTimeout());
    }

    @Override
    protected Properties getURIParameters(URI uri) {
        List<NameValuePair> params = URLEncodedUtils.parse(uri, StandardCharsets.UTF_8);
        if (params.isEmpty()) {
            return null;
        }
        Properties properties = new Properties();
        params.forEach(p -> properties.setProperty(p.getName(), p.getValue()));
        return properties;
    }

    @Override
    protected ADBErrorReporter createErrorReporter() {
        // Classify protocol-level I/O failures using the exception lists owned by ADBProtocol.
        return new ADBErrorReporter() {
            @Override
            protected boolean isTimeoutConnectionError(IOException e) {
                return isInstanceOf(e, ADBProtocol.TIMEOUT_CONNECTION_ERRORS);
            }

            @Override
            protected boolean isTransientConnectionError(IOException e) {
                return isInstanceOf(e, ADBProtocol.TRANSIENT_CONNECTION_ERRORS);
            }
        };
    }

    @Override
    protected ADBProductVersion getDriverVersion() {
        Package driverPackage = getClass().getPackage();
        return parseDriverVersion(driverPackage.getImplementationTitle(),
                driverPackage.getImplementationVersion());
    }

    // Parses "major.minor[.…]" leniently; any non-numeric component stops parsing
    // and whatever was successfully parsed so far is kept (defaults are 0).
    private static ADBProductVersion parseDriverVersion(String productName, String productVersion) {
        int major = 0;
        int minor = 0;
        if (productVersion != null) {
            String[] parts = productVersion.split("\\.");
            try {
                major = Integer.parseInt(parts[0]);
                if (parts.length > 1) {
                    minor = Integer.parseInt(parts[1]);
                }
            } catch (NumberFormatException ignored) {
                // keep the values parsed before the failure
            }
        }
        return new ADBProductVersion(productName, productVersion, major, minor);
    }
}
| 6,035 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-driver/src/main/java/org/apache/asterix | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-driver/src/main/java/org/apache/asterix/jdbc/ADBProtocol.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.NoRouteToHostException;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.asterix.jdbc.core.ADBDriverContext;
import org.apache.asterix.jdbc.core.ADBDriverProperty;
import org.apache.asterix.jdbc.core.ADBErrorReporter;
import org.apache.asterix.jdbc.core.ADBProtocolBase;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpHost;
import org.apache.http.HttpStatus;
import org.apache.http.NoHttpResponseException;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpOptions;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.config.SocketConfig;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.conn.HttpClientConnectionManager;
import org.apache.http.conn.HttpHostConnectException;
import org.apache.http.entity.ContentProducer;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.EntityTemplate;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.exc.InvalidDefinitionException;
final class ADBProtocol extends ADBProtocolBase {
private static final String QUERY_SERVICE_ENDPOINT_PATH = "/query/service";
private static final String QUERY_RESULT_ENDPOINT_PATH = "/query/service/result";
private static final String ACTIVE_REQUESTS_ENDPOINT_PATH = "/admin/requests/running";
// Milliseconds to wait when leasing a connection from the HTTP connection pool.
private static final int CONNECTION_REQUEST_TIMEOUT = 50; // ms
// I/O exception types classified as timeout connection errors (used by Driver.createErrorReporter).
static final List<Class<? extends IOException>> TIMEOUT_CONNECTION_ERRORS =
        Collections.singletonList(ConnectTimeoutException.class);
// I/O exception types classified as transient connection errors (used by Driver.createErrorReporter).
static final List<Class<? extends IOException>> TRANSIENT_CONNECTION_ERRORS =
        Arrays.asList(NoRouteToHostException.class, NoHttpResponseException.class, HttpHostConnectException.class);
// Endpoint URIs resolved in the constructor from host/port and the SSL property.
final URI queryEndpoint;
final URI queryResultEndpoint;
final URI activeRequestsEndpoint;
// HTTP client plumbing; created in the constructor, released in close().
final HttpClientConnectionManager httpConnectionManager;
final HttpClientContext httpClientContext;
final CloseableHttpClient httpClient;
/**
 * Builds the protocol layer: resolves the service endpoint URIs and configures the
 * pooled HTTP client (timeouts, optional credentials).
 */
public ADBProtocol(String host, int port, Map<ADBDriverProperty, Object> params, ADBDriverContext driverContext,
        int loginTimeoutSeconds) throws SQLException {
    super(driverContext, params);
    // Resolve the query, query-result, and active-requests endpoints (http or https
    // depending on the SSL driver property).
    boolean sslEnabled = (Boolean) ADBDriverProperty.Common.SSL.fetchPropertyValue(params);
    URI queryEndpoint = createEndpointUri(sslEnabled, host, port, QUERY_SERVICE_ENDPOINT_PATH,
            driverContext.getErrorReporter());
    URI queryResultEndpoint =
            createEndpointUri(sslEnabled, host, port, QUERY_RESULT_ENDPOINT_PATH, driverContext.getErrorReporter());
    URI activeRequestsEndpoint = createEndpointUri(sslEnabled, host, port, getActiveRequestsEndpointPath(params),
            driverContext.getErrorReporter());
    // Pool sized to max(16, CPU count); per-route limit equals the total limit.
    PoolingHttpClientConnectionManager httpConnectionManager = new PoolingHttpClientConnectionManager();
    int maxConnections = Math.max(16, Runtime.getRuntime().availableProcessors());
    httpConnectionManager.setDefaultMaxPerRoute(maxConnections);
    httpConnectionManager.setMaxTotal(maxConnections);
    // Socket (read) timeout is only applied when the SOCKET_TIMEOUT property is set.
    SocketConfig.Builder socketConfigBuilder = null;
    Number socketTimeoutMillis = (Number) ADBDriverProperty.Common.SOCKET_TIMEOUT.fetchPropertyValue(params);
    if (socketTimeoutMillis != null) {
        socketConfigBuilder = SocketConfig.custom();
        socketConfigBuilder.setSoTimeout(socketTimeoutMillis.intValue());
    }
    if (socketConfigBuilder != null) {
        httpConnectionManager.setDefaultSocketConfig(socketConfigBuilder.build());
    }
    RequestConfig.Builder requestConfigBuilder = RequestConfig.custom();
    // Connect timeout: an explicit CONNECT_TIMEOUT property wins; otherwise fall back
    // to DriverManager's login timeout (seconds, converted to millis), floored at 0.
    Number connectTimeoutMillis = (Number) ADBDriverProperty.Common.CONNECT_TIMEOUT.fetchPropertyValue(params);
    int connectTimeout = Math.max(0, connectTimeoutMillis != null ? connectTimeoutMillis.intValue()
            : (int) TimeUnit.SECONDS.toMillis(loginTimeoutSeconds));
    requestConfigBuilder.setConnectTimeout(connectTimeout);
    if (socketTimeoutMillis != null) {
        requestConfigBuilder.setSocketTimeout(socketTimeoutMillis.intValue());
    }
    requestConfigBuilder.setConnectionRequestTimeout(CONNECTION_REQUEST_TIMEOUT);
    RequestConfig requestConfig = requestConfigBuilder.build();
    HttpClientBuilder httpClientBuilder = HttpClientBuilder.create();
    httpClientBuilder.setConnectionManager(httpConnectionManager);
    // Shared manager: close() must shut the manager down explicitly (see close()).
    httpClientBuilder.setConnectionManagerShared(true);
    httpClientBuilder.setDefaultRequestConfig(requestConfig);
    if (user != null) {
        // Credentials are only configured when a user name was supplied.
        String password = (String) ADBDriverProperty.Common.PASSWORD.fetchPropertyValue(params);
        httpClientBuilder.setDefaultCredentialsProvider(createCredentialsProvider(user, password));
    }
    this.queryEndpoint = queryEndpoint;
    this.queryResultEndpoint = queryResultEndpoint;
    this.activeRequestsEndpoint = activeRequestsEndpoint;
    this.httpConnectionManager = httpConnectionManager;
    this.httpClient = httpClientBuilder.build();
    this.httpClientContext = createHttpClientContext(queryEndpoint);
}
/**
 * Releases all HTTP resources held by this protocol instance.
 * Closing failures are reported as SQLExceptions via the error reporter.
 */
@Override
public void close() throws SQLException {
try {
httpClient.close();
} catch (IOException e) {
throw getErrorReporter().errorClosingResource(e);
} finally {
// The pool is created with setConnectionManagerShared(true), so the client does not
// shut it down itself; do it here even if closing the client failed.
httpConnectionManager.shutdown();
}
}
/**
 * Establishes the logical connection by pinging the query endpoint and returns the database
 * version string reported by the server (from the HTTP "Server" header), or null if absent.
 */
@Override
public String connect() throws SQLException {
String databaseVersion = pingImpl(-1, true); // TODO:review timeout
if (getLogger().isLoggable(Level.FINE)) {
getLogger().log(Level.FINE, String.format("connected to '%s' at %s", databaseVersion, queryEndpoint));
}
return databaseVersion;
}
/**
 * Liveness check: returns true iff the query endpoint answers successfully.
 * Never throws; any failure (auth, protocol, I/O) is reported as false.
 */
@Override
public boolean ping(int timeoutSeconds) {
try {
pingImpl(timeoutSeconds, false);
return true;
} catch (SQLException e) {
return false;
}
}
/**
 * Probes the query endpoint with an HTTP OPTIONS request.
 *
 * @param timeoutSeconds per-request timeout in seconds; values {@code <= 0} mean "use the
 *        client-level defaults configured in the constructor"
 * @param fetchDatabaseVersion whether to read the database version from the "Server" response header
 * @return the database version if requested and reported by the server, otherwise {@code null}
 * @throws SQLException on authentication failure, unexpected status, or I/O error
 */
private String pingImpl(int timeoutSeconds, boolean fetchDatabaseVersion) throws SQLException {
    HttpOptions httpOptions = new HttpOptions(queryEndpoint);
    if (timeoutSeconds > 0) {
        // Honor the caller-supplied timeout for this single request (previously ignored):
        // override both connect and socket timeouts, clamped to the int range.
        int timeoutMillis = (int) Math.min(TimeUnit.SECONDS.toMillis(timeoutSeconds), Integer.MAX_VALUE);
        httpOptions.setConfig(RequestConfig.custom().setConnectTimeout(timeoutMillis)
                .setSocketTimeout(timeoutMillis).setConnectionRequestTimeout(CONNECTION_REQUEST_TIMEOUT).build());
    }
    try (CloseableHttpResponse response = httpClient.execute(httpOptions, httpClientContext)) {
        int statusCode = response.getStatusLine().getStatusCode();
        switch (statusCode) {
            case HttpStatus.SC_OK:
                String databaseVersion = null;
                if (fetchDatabaseVersion) {
                    // The server advertises its product/version in the standard "Server" header.
                    Header serverHeader = response.getFirstHeader(HttpHeaders.SERVER);
                    if (serverHeader != null) {
                        databaseVersion = serverHeader.getValue();
                    }
                }
                return databaseVersion;
            case HttpStatus.SC_UNAUTHORIZED:
            case HttpStatus.SC_FORBIDDEN:
                throw getErrorReporter().errorAuth();
            default:
                throw getErrorReporter().errorInConnection(String.valueOf(response.getStatusLine()));
        }
    } catch (IOException e) {
        throw getErrorReporter().errorInConnection(e);
    }
}
/**
 * Submits a statement to the query service in deferred mode and returns the parsed service
 * response (which carries a result handle rather than inline results).
 *
 * The request body is a JSON document generated directly into an in-memory buffer; statement
 * options (compile-only, read-only, SQL++ compat, timeout, dataverse, client context id) are
 * emitted only when set, and positional arguments are serialized in lossless ADM form.
 *
 * @throws SQLException if an argument type cannot be serialized, on protocol errors, or on I/O errors
 */
@Override
public QueryServiceResponse submitStatement(String sql, List<?> args, SubmitStatementOptions options)
throws SQLException {
HttpPost httpPost = new HttpPost(queryEndpoint);
// Request lossless ADM JSON so typed values (e.g. datetime, uuid) survive the round trip.
httpPost.setHeader(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON
.withParameters(new BasicNameValuePair(FORMAT_LOSSLESS_ADM, Boolean.TRUE.toString())).toString());
ByteArrayOutputStreamImpl baos = new ByteArrayOutputStreamImpl(512);
try {
JsonGenerator jsonGen =
driverContext.getGenericObjectWriter().getFactory().createGenerator(baos, JsonEncoding.UTF8);
jsonGen.writeStartObject();
jsonGen.writeStringField(CLIENT_TYPE, CLIENT_TYPE_JDBC);
// Deferred mode: the response contains a handle; results are fetched separately in fetchResult().
jsonGen.writeStringField(MODE, MODE_DEFERRED);
jsonGen.writeStringField(STATEMENT, sql);
jsonGen.writeBooleanField(SIGNATURE, true);
jsonGen.writeStringField(PLAN_FORMAT, PLAN_FORMAT_STRING);
jsonGen.writeNumberField(MAX_WARNINGS, maxWarnings);
if (options.compileOnly) {
jsonGen.writeBooleanField(COMPILE_ONLY, true);
}
if (options.forceReadOnly) {
jsonGen.writeBooleanField(READ_ONLY, true);
}
if (options.sqlCompatMode) {
jsonGen.writeBooleanField(SQL_COMPAT, true);
}
if (options.timeoutSeconds > 0) {
jsonGen.writeStringField(TIMEOUT, options.timeoutSeconds + "s");
}
if (options.dataverseName != null) {
jsonGen.writeStringField(DATAVERSE, options.dataverseName);
}
if (options.executionId != null) {
// Client context id lets cancelRunningStatement() identify this request server-side.
jsonGen.writeStringField(CLIENT_CONTEXT_ID, options.executionId.toString());
}
if (args != null && !args.isEmpty()) {
jsonGen.writeFieldName(ARGS);
driverContext.getAdmFormatObjectWriter().writeValue(jsonGen, args);
}
jsonGen.writeEndObject();
jsonGen.flush();
} catch (InvalidDefinitionException e) {
// A statement argument of a Java type Jackson cannot map to ADM.
throw getErrorReporter().errorUnexpectedType(e.getType().getRawClass());
} catch (IOException e) {
throw getErrorReporter().errorInRequestGeneration(e);
}
if (getLogger().isLoggable(Level.FINE)) {
getLogger().log(Level.FINE, String.format("%s { %s } with args { %s }",
options.compileOnly ? "compile" : "execute", sql, args != null ? args : ""));
}
httpPost.setEntity(new EntityTemplateImpl(baos, ContentType.APPLICATION_JSON));
try (CloseableHttpResponse httpResponse = httpClient.execute(httpPost, httpClientContext)) {
return handlePostQueryResponse(httpResponse);
} catch (JsonProcessingException e) {
throw getErrorReporter().errorInProtocol(e);
} catch (IOException e) {
throw getErrorReporter().errorInConnection(e);
}
}
/**
 * Decodes the HTTP response to a statement submission and maps failures to SQLExceptions.
 * The service returns a decodable JSON body for OK as well as for a known set of error
 * statuses; anything else is a protocol violation.
 */
private QueryServiceResponse handlePostQueryResponse(CloseableHttpResponse httpResponse)
        throws SQLException, IOException {
    int httpStatus = httpResponse.getStatusLine().getStatusCode();
    if (httpStatus == HttpStatus.SC_UNAUTHORIZED || httpStatus == HttpStatus.SC_FORBIDDEN) {
        throw getErrorReporter().errorAuth();
    }
    boolean bodyDecodable = httpStatus == HttpStatus.SC_OK || httpStatus == HttpStatus.SC_BAD_REQUEST
            || httpStatus == HttpStatus.SC_INTERNAL_SERVER_ERROR || httpStatus == HttpStatus.SC_SERVICE_UNAVAILABLE;
    if (!bodyDecodable) {
        throw getErrorReporter().errorInProtocol(httpResponse.getStatusLine().toString());
    }
    QueryServiceResponse serviceResponse;
    try (InputStream contentStream = httpResponse.getEntity().getContent()) {
        serviceResponse =
                driverContext.getGenericObjectReader().forType(QueryServiceResponse.class).readValue(contentStream);
    }
    if (httpStatus == HttpStatus.SC_OK && serviceResponse.status == QueryServiceResponse.Status.SUCCESS) {
        return serviceResponse;
    }
    if (serviceResponse.status == QueryServiceResponse.Status.TIMEOUT) {
        throw getErrorReporter().errorTimeout();
    }
    // Prefer the service-reported error; fall back to a generic protocol error.
    SQLException serviceError = getErrorIfExists(serviceResponse);
    if (serviceError == null) {
        throw getErrorReporter().errorInProtocol(httpResponse.getStatusLine().toString());
    }
    throw serviceError;
}
/**
 * Fetches the deferred result for a previously submitted statement and returns a JsonParser
 * positioned inside the "results" array.
 *
 * Resource ownership: the returned parser wraps the HTTP content stream, which in turn wraps
 * the HTTP response (via InputStreamWithAttachedResource); with AUTO_CLOSE_SOURCE enabled,
 * closing the parser closes the whole chain. On any failure before return, everything opened
 * so far is closed via closeQuietly with the failure attached as suppressed.
 *
 * @throws SQLException if the response carries no/invalid handle, the result is unavailable,
 *         or a protocol/connection error occurs
 */
@Override
public JsonParser fetchResult(QueryServiceResponse response, SubmitStatementOptions options) throws SQLException {
if (response.handle == null) {
throw getErrorReporter().errorInProtocol();
}
// The handle is a URI path; only its last segment is appended to the result endpoint.
int p = response.handle.lastIndexOf("/");
if (p < 0) {
throw getErrorReporter().errorInProtocol(response.handle);
}
String handlePath = response.handle.substring(p);
URI resultRequestURI;
try {
resultRequestURI = new URI(queryResultEndpoint + handlePath);
} catch (URISyntaxException e) {
throw getErrorReporter().errorInProtocol(handlePath);
}
HttpGet httpGet = new HttpGet(resultRequestURI);
httpGet.setHeader(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.getMimeType());
CloseableHttpResponse httpResponse = null;
InputStream httpContentStream = null;
JsonParser parser = null;
try {
httpResponse = httpClient.execute(httpGet, httpClientContext);
int httpStatus = httpResponse.getStatusLine().getStatusCode();
if (httpStatus != HttpStatus.SC_OK) {
throw getErrorReporter().errorNoResult();
}
HttpEntity entity = httpResponse.getEntity();
httpContentStream = entity.getContent();
// Tie the HTTP response's lifetime to the stream so closing the parser releases the connection.
parser = driverContext.getGenericObjectReader().getFactory()
.createParser(new InputStreamWithAttachedResource(httpContentStream, httpResponse));
if (!advanceToArrayField(parser, RESULTS)) {
throw getErrorReporter().errorInProtocol();
}
parser.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, true);
return parser;
} catch (SQLException e) {
closeQuietly(e, parser, httpContentStream, httpResponse);
throw e;
} catch (JsonProcessingException e) {
closeQuietly(e, parser, httpContentStream, httpResponse);
throw getErrorReporter().errorInProtocol(e);
} catch (IOException e) {
closeQuietly(e, parser, httpContentStream, httpResponse);
throw getErrorReporter().errorInConnection(e);
}
}
/**
 * Advances the parser to the value of the named top-level field and reports whether that value
 * is an array. Non-matching object/array values are skipped whole; on success the parser is
 * positioned at the START_ARRAY token.
 */
private boolean advanceToArrayField(JsonParser parser, String fieldName) throws IOException {
    if (parser.nextToken() != JsonToken.START_OBJECT) {
        return false;
    }
    while (true) {
        JsonToken valueToken = parser.nextValue();
        if (valueToken == null || valueToken == JsonToken.END_OBJECT) {
            return false;
        }
        if (parser.currentName().equals(fieldName)) {
            return valueToken == JsonToken.START_ARRAY;
        }
        if (valueToken.isStructStart()) {
            parser.skipChildren();
        } else {
            parser.nextToken();
        }
    }
}
/**
 * Cancels a running statement by issuing an HTTP DELETE against the active-requests endpoint,
 * identifying the request by its client context id.
 */
@Override
public void cancelRunningStatement(UUID executionId) throws SQLException {
    HttpDelete deleteRequest;
    try {
        URIBuilder builder = new URIBuilder(activeRequestsEndpoint);
        builder.setParameter(CLIENT_CONTEXT_ID, String.valueOf(executionId));
        deleteRequest = new HttpDelete(builder.build());
    } catch (URISyntaxException e) {
        throw getErrorReporter().errorInRequestURIGeneration(e);
    }
    try (CloseableHttpResponse response = httpClient.execute(deleteRequest, httpClientContext)) {
        int status = response.getStatusLine().getStatusCode();
        if (status == HttpStatus.SC_UNAUTHORIZED || status == HttpStatus.SC_FORBIDDEN) {
            throw getErrorReporter().errorAuth();
        }
        // NOT_FOUND is acceptable: the request may have already completed.
        if (status != HttpStatus.SC_OK && status != HttpStatus.SC_NOT_FOUND) {
            throw getErrorReporter().errorInProtocol(response.getStatusLine().toString());
        }
    } catch (IOException e) {
        throw getErrorReporter().errorInConnection(e);
    }
}
/** Returns the driver-wide error reporter used to build SQLExceptions. */
@Override
public ADBErrorReporter getErrorReporter() {
return driverContext.getErrorReporter();
}
/** Returns the driver-wide logger. */
@Override
public Logger getLogger() {
return driverContext.getLogger();
}
/** Builds a credentials provider that applies the given user/password to any auth scope. */
private static CredentialsProvider createCredentialsProvider(String user, String password) {
CredentialsProvider cp = new BasicCredentialsProvider();
cp.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(user, password));
return cp;
}
/**
 * Creates the shared request context with the target host pre-registered in the auth cache,
 * enabling preemptive basic authentication (credentials sent without waiting for a 401).
 */
private static HttpClientContext createHttpClientContext(URI uri) {
HttpClientContext hcCtx = HttpClientContext.create();
AuthCache ac = new BasicAuthCache();
ac.put(new HttpHost(uri.getHost(), uri.getPort(), uri.getScheme()), new BasicScheme());
hcCtx.setAuthCache(ac);
return hcCtx;
}
/**
 * Closes each non-null resource, attaching any IOException raised by close() to
 * {@code mainExc} as a suppressed exception instead of propagating it.
 *
 * @param mainExc the exception being propagated by the caller, or {@code null} to discard close failures
 * @param closeableList resources to close; {@code null} entries are skipped
 */
private static void closeQuietly(Exception mainExc, Closeable... closeableList) {
    for (Closeable closeable : closeableList) {
        if (closeable == null) {
            continue;
        }
        try {
            closeable.close();
        } catch (IOException e) {
            if (mainExc != null) {
                mainExc.addSuppressed(e);
            }
        }
    }
}
/**
 * Builds an http/https endpoint URI for the given host, port and path, reporting any syntax
 * problem as an unsupported-parameter SQLException.
 */
private static URI createEndpointUri(boolean sslEnabled, String host, int port, String path,
        ADBErrorReporter errorReporter) throws SQLException {
    String scheme = sslEnabled ? "https" : "http";
    try {
        return new URI(scheme, null, host, port, path, null, null);
    } catch (URISyntaxException e) {
        throw errorReporter.errorParameterValueNotSupported("endpoint " + host + ":" + port);
    }
}
/**
 * Returns the path of the active-requests (statement cancellation) endpoint, honoring the
 * {@code ACTIVE_REQUESTS_PATH} connection property when set.
 * Made static: it reads no instance state and is invoked from the constructor before the
 * object is fully initialized.
 */
private static String getActiveRequestsEndpointPath(Map<ADBDriverProperty, Object> params) {
    String path = (String) ADBDriverProperty.Common.ACTIVE_REQUESTS_PATH.fetchPropertyValue(params);
    return path != null ? path : ACTIVE_REQUESTS_ENDPOINT_PATH;
}
/**
 * ByteArrayOutputStream that also implements httpcore's ContentProducer, letting the request
 * body be built in memory once and streamed into an EntityTemplate without copying.
 */
static final class ByteArrayOutputStreamImpl extends ByteArrayOutputStream implements ContentProducer {
private ByteArrayOutputStreamImpl(int size) {
super(size);
}
}
/**
 * EntityTemplate whose content length is fixed to the buffered body size, so requests are sent
 * with an explicit Content-Length instead of chunked encoding.
 */
static final class EntityTemplateImpl extends EntityTemplate {
private final long contentLength;
private EntityTemplateImpl(ByteArrayOutputStreamImpl baos, ContentType contentType) {
super(baos);
// Capture the size at construction time; the buffer is fully written before this point.
contentLength = baos.size();
setContentType(contentType.toString());
}
@Override
public long getContentLength() {
return contentLength;
}
}
/**
 * InputStream wrapper that closes an additional resource (here: the HTTP response) when the
 * stream itself is closed, tying the connection's lifetime to the result parser.
 */
static final class InputStreamWithAttachedResource extends FilterInputStream {
private final Closeable resource;
private InputStreamWithAttachedResource(InputStream delegate, Closeable resource) {
super(delegate);
this.resource = Objects.requireNonNull(resource);
}
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Close the attached resource even if closing the stream failed.
resource.close();
}
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jdbc;
import java.lang.reflect.Method;
import java.net.InetAddress;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.test.runtime.ExecutionTestUtil;
import org.apache.hyracks.control.nc.NodeControllerService;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
/**
 * Parameterized JUnit driver that discovers test methods (prefix "test") reflectively on the
 * registered {@link JdbcTester} subclasses and runs each as one parameterized case against an
 * embedded AsterixDB cluster started once per class.
 */
@RunWith(Parameterized.class)
public class JdbcDriverTest {

    static final String ASTERIX_APP_PATH_PROPERTY = "asterix-app.dir";
    static final String ASTERIX_APP_PATH = System.getProperty(ASTERIX_APP_PATH_PROPERTY);

    /** Tester classes whose methods are scanned for test cases. */
    static final List<Class<? extends JdbcTester>> TESTER_CLASSES =
            Arrays.asList(JdbcMetadataTester.class, JdbcConnectionTester.class, JdbcStatementTester.class,
                    JdbcPreparedStatementTester.class, JdbcResultSetTester.JdbcStatementResultSetTester.class,
                    JdbcResultSetTester.JdbcPreparedStatementResultSetTester.class, JdbcStatementParameterTester.class);

    /** Methods whose names start with this prefix are picked up as test cases. */
    public static final String TEST_METHOD_PREFIX = "test";

    private static JdbcTester.JdbcTestContext testContext;

    private final Class<? extends JdbcTester> testerClass;
    private final Method testMethod;

    public JdbcDriverTest(String simpleClassName, String methodName) throws Exception {
        Optional<Class<? extends JdbcTester>> testerClassRef =
                TESTER_CLASSES.stream().filter(c -> c.getSimpleName().equals(simpleClassName)).findFirst();
        if (testerClassRef.isEmpty()) {
            throw new Exception("Cannot find class: " + simpleClassName);
        }
        testerClass = testerClassRef.get();
        Optional<Method> testMethodRef =
                Arrays.stream(testerClass.getMethods()).filter(m -> m.getName().equals(methodName)).findFirst();
        if (testMethodRef.isEmpty()) {
            throw new Exception("Cannot find method: " + methodName + " in class " + testerClass.getName());
        }
        testMethod = testMethodRef.get();
    }

    /** Enumerates (tester class simple name, method name) pairs for the parameterized runner. */
    @Parameterized.Parameters(name = "JdbcDriverTest {index}: {0}.{1}")
    public static Collection<Object[]> tests() {
        List<Object[]> testsuite = new ArrayList<>();
        for (Class<? extends JdbcTester> testerClass : TESTER_CLASSES) {
            Arrays.stream(testerClass.getMethods()).map(Method::getName).filter(n -> n.startsWith(TEST_METHOD_PREFIX))
                    .sorted().forEach(n -> testsuite.add(new Object[] { testerClass.getSimpleName(), n }));
        }
        return testsuite;
    }

    /** Boots the embedded cluster and builds the shared test context (host + NC API port). */
    @BeforeClass
    public static void setUp() throws Exception {
        if (ASTERIX_APP_PATH == null) {
            throw new Exception(String.format("Property %s is not set", ASTERIX_APP_PATH_PROPERTY));
        }
        // "test" is the Maven source-set directory (src/test/resources); the original reused
        // TEST_METHOD_PREFIX here, which matched only by coincidence and was misleading.
        Path ccConfigFile = Path.of(ASTERIX_APP_PATH, "src", "test", "resources", "cc.conf");
        ExecutionTestUtil.setUp(true, ccConfigFile.toString(), ExecutionTestUtil.integrationUtil, false,
                Collections.emptyList());
        NodeControllerService nc = ExecutionTestUtil.integrationUtil.ncs[0];
        String host = InetAddress.getLoopbackAddress().getHostAddress();
        INcApplicationContext appCtx = (INcApplicationContext) nc.getApplicationContext();
        int apiPort = appCtx.getExternalProperties().getNcApiPort();
        testContext = JdbcTester.createTestContext(host, apiPort);
    }

    @AfterClass
    public static void tearDown() throws Exception {
        ExecutionTestUtil.tearDown(true, false);
    }

    /** Instantiates the tester, injects the shared context, and invokes the selected method. */
    @Test
    public void test() throws Exception {
        JdbcTester tester = testerClass.getDeclaredConstructor().newInstance();
        tester.setTestContext(testContext);
        testMethod.invoke(tester);
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jdbc;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.jdbc.core.ADBPreparedStatement;
import org.junit.Assert;
/**
 * Tests for {@link java.sql.PreparedStatement} behavior of the AsterixDB JDBC driver:
 * lifecycle, execute variants, result-set access, limits, warnings, and the wrapper API.
 */
class JdbcPreparedStatementTester extends JdbcTester {

    /** Statement open/close lifecycle; close() must be idempotent. */
    public void testLifecycle() throws SQLException {
        Connection c = createConnection();
        PreparedStatement s = c.prepareStatement(Q1);
        Assert.assertFalse(s.isClosed());
        Assert.assertSame(c, s.getConnection());
        s.close();
        Assert.assertTrue(s.isClosed());
        // ok to call close() on a closed statement
        s.close();
        Assert.assertTrue(s.isClosed());
    }

    public void testAutoCloseOnConnectionClose() throws SQLException {
        Connection c = createConnection();
        // check that a statement is automatically closed when the connection is closed
        PreparedStatement s = c.prepareStatement(Q1);
        Assert.assertFalse(s.isClosed());
        c.close();
        Assert.assertTrue(s.isClosed());
    }

    /** closeOnCompletion(): the statement must close itself when its last ResultSet closes. */
    public void testCloseOnCompletion() throws SQLException {
        try (Connection c = createConnection()) {
            PreparedStatement s = c.prepareStatement(Q1);
            Assert.assertFalse(s.isCloseOnCompletion());
            s.closeOnCompletion();
            Assert.assertTrue(s.isCloseOnCompletion());
            Assert.assertFalse(s.isClosed());
            ResultSet rs = s.executeQuery();
            Assert.assertTrue(rs.next());
            Assert.assertFalse(rs.next());
            rs.close();
            Assert.assertTrue(s.isClosed());
        }
    }

    /** executeQuery(): queries succeed; DDL and DML must be rejected. */
    public void testExecuteQuery() throws SQLException {
        try (Connection c = createConnection()) {
            // Query -> ok
            try (PreparedStatement s1 = c.prepareStatement(Q1); ResultSet rs1 = s1.executeQuery()) {
                Assert.assertTrue(rs1.next());
                Assert.assertEquals(1, rs1.getMetaData().getColumnCount());
                Assert.assertEquals(V1, rs1.getInt(1));
                Assert.assertFalse(rs1.next());
                Assert.assertFalse(rs1.isClosed());
            }
            // DDL -> error
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testExecuteQuery");
            try {
                PreparedStatement s2 = c.prepareStatement(printCreateDataverse(dataverse));
                s2.executeQuery();
                Assert.fail("DDL did not fail in executeQuery()");
            } catch (SQLException e) {
                String msg = e.getMessage();
                Assert.assertTrue(msg, msg.contains(ErrorCode.PROHIBITED_STATEMENT_CATEGORY.errorCode()));
            }
            // DML -> error
            String dataset = "ds1";
            PreparedStatement s3 = c.prepareStatement(printCreateDataverse(dataverse));
            s3.execute();
            PreparedStatement s4 = c.prepareStatement(printCreateDataset(dataverse, dataset));
            s4.execute();
            try {
                PreparedStatement s5 = c.prepareStatement(printInsert(dataverse, dataset, dataGen("x", 1, 2)));
                s5.executeQuery();
                Assert.fail("DML did not fail in executeQuery()");
            } catch (SQLException e) {
                String msg = e.getMessage();
                Assert.assertTrue(msg, msg.contains(ErrorCode.PROHIBITED_STATEMENT_CATEGORY.errorCode()));
            }
            // Cleanup
            PreparedStatement s6 = c.prepareStatement(printDropDataverse(dataverse));
            s6.execute();
        }
    }

    /** executeUpdate(): DDL/DML succeed; queries must be rejected. */
    public void testExecuteUpdate() throws SQLException {
        try (Connection c = createConnection()) {
            // DDL -> ok
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testExecuteUpdate");
            PreparedStatement s1 = c.prepareStatement(printCreateDataverse(dataverse));
            int res = s1.executeUpdate();
            Assert.assertEquals(0, res);
            String dataset = "ds1";
            PreparedStatement s2 = c.prepareStatement(printCreateDataset(dataverse, dataset));
            res = s2.executeUpdate();
            Assert.assertEquals(0, res);
            // DML -> ok
            PreparedStatement s3 = c.prepareStatement(printInsert(dataverse, dataset, dataGen("x", 1, 2)));
            res = s3.executeUpdate();
            // currently, DML statements always return update count = 1
            Assert.assertEquals(1, res);
            // Query -> error
            try {
                PreparedStatement s4 = c.prepareStatement(Q1);
                s4.executeUpdate();
                Assert.fail("Query did not fail in executeUpdate()");
            } catch (SQLException e) {
                String msg = e.getMessage();
                Assert.assertTrue(msg, msg.contains("Invalid statement category"));
            }
            // Cleanup
            PreparedStatement s5 = c.prepareStatement(printDropDataverse(dataverse));
            s5.executeUpdate();
        }
    }

    /** execute(): returns true for queries (result set) and false for DDL/DML (update count). */
    public void testExecute() throws SQLException {
        try (Connection c = createConnection()) {
            // Query -> ok
            PreparedStatement s1 = c.prepareStatement(Q1);
            boolean res = s1.execute();
            Assert.assertTrue(res);
            Assert.assertEquals(-1, s1.getUpdateCount());
            try (ResultSet rs = s1.getResultSet()) {
                Assert.assertTrue(rs.next());
                Assert.assertEquals(1, rs.getMetaData().getColumnCount());
                Assert.assertEquals(V1, rs.getInt(1));
                Assert.assertFalse(rs.next());
                Assert.assertFalse(rs.isClosed());
            }
            // DDL -> ok
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testExecute");
            PreparedStatement s2 = c.prepareStatement(printCreateDataverse(dataverse));
            res = s2.execute();
            Assert.assertFalse(res);
            Assert.assertEquals(0, s2.getUpdateCount());
            String dataset = "ds1";
            PreparedStatement s3 = c.prepareStatement(printCreateDataset(dataverse, dataset));
            res = s3.execute();
            Assert.assertFalse(res);
            // DML -> ok
            PreparedStatement s4 = c.prepareStatement(printInsert(dataverse, dataset, dataGen("x", 1, 2)));
            res = s4.execute();
            Assert.assertFalse(res);
            // currently, DML statements always return update count = 1
            Assert.assertEquals(1, s4.getUpdateCount());
            // Cleanup
            PreparedStatement s5 = c.prepareStatement(printDropDataverse(dataverse));
            s5.execute();
        }
    }

    /** getResultSet()/getMoreResults() semantics, including KEEP_CURRENT_RESULT. */
    public void testGetResultSet() throws SQLException {
        try (Connection c = createConnection()) {
            // Query
            PreparedStatement s1 = c.prepareStatement(Q1);
            boolean res = s1.execute();
            Assert.assertTrue(res);
            ResultSet rs = s1.getResultSet();
            Assert.assertFalse(rs.isClosed());
            Assert.assertTrue(rs.next());
            Assert.assertFalse(s1.getMoreResults()); // closes current ResultSet
            Assert.assertTrue(rs.isClosed());
            PreparedStatement s2 = c.prepareStatement(Q1);
            res = s2.execute();
            Assert.assertTrue(res);
            rs = s2.getResultSet();
            Assert.assertFalse(rs.isClosed());
            Assert.assertTrue(rs.next());
            Assert.assertFalse(s2.getMoreResults(Statement.KEEP_CURRENT_RESULT));
            Assert.assertFalse(rs.isClosed());
            rs.close();
            // DDL
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testGetResultSet");
            PreparedStatement s3 = c.prepareStatement(printCreateDataverse(dataverse));
            res = s3.execute();
            Assert.assertFalse(res);
            Assert.assertNull(s3.getResultSet());
            Assert.assertFalse(s3.getMoreResults());
            String dataset = "ds1";
            PreparedStatement s4 = c.prepareStatement(printCreateDataset(dataverse, dataset));
            res = s4.execute();
            Assert.assertFalse(res);
            // DML
            PreparedStatement s5 = c.prepareStatement(printInsert(dataverse, dataset, dataGen("x", 1, 2)));
            res = s5.execute();
            Assert.assertFalse(res);
            Assert.assertNull(s5.getResultSet());
            Assert.assertFalse(s5.getMoreResults());
        }
    }

    /** setMaxRows() must cap the number of rows returned by the driver. */
    public void testMaxRows() throws SQLException {
        try (Connection c = createConnection()) {
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testMaxRows");
            String dataset = "ds1";
            String field = "x";
            PreparedStatement s1 = c.prepareStatement(printCreateDataverse(dataverse));
            s1.execute();
            PreparedStatement s2 = c.prepareStatement(printCreateDataset(dataverse, dataset));
            s2.execute();
            PreparedStatement s3 = c.prepareStatement(printInsert(dataverse, dataset, dataGen(field, 1, 2, 3)));
            s3.execute();
            PreparedStatement s4 = c.prepareStatement(String.format("select %s from %s.%s", field,
                    printDataverseName(dataverse), printIdentifier(dataset)));
            s4.setMaxRows(2);
            Assert.assertEquals(2, s4.getMaxRows());
            try (ResultSet rs = s4.executeQuery()) {
                Assert.assertTrue(rs.next());
                Assert.assertTrue(rs.next());
                Assert.assertFalse(rs.next());
            }
        }
    }

    /** Server-side cast warnings must surface as a chained SQLWarning list on the statement. */
    public void testWarnings() throws SQLException {
        try (Connection c = createConnection();
                PreparedStatement s = c.prepareStatement("select double('x'), bigint('y')"); // --> NULL with warning
                ResultSet rs = s.executeQuery()) {
            Assert.assertTrue(rs.next());
            rs.getDouble(1);
            Assert.assertTrue(rs.wasNull());
            rs.getLong(2);
            Assert.assertTrue(rs.wasNull());
            SQLWarning w = s.getWarnings();
            Assert.assertNotNull(w);
            String msg = w.getMessage();
            Assert.assertTrue(msg, msg.contains(ErrorCode.INVALID_FORMAT.errorCode()));
            SQLWarning w2 = w.getNextWarning();
            Assert.assertNotNull(w2);
            // BUGFIX: previously read w.getMessage() again, so the second warning's
            // message was never actually verified.
            String msg2 = w2.getMessage();
            Assert.assertTrue(msg2, msg2.contains(ErrorCode.INVALID_FORMAT.errorCode()));
            Assert.assertNull(w2.getNextWarning());
            s.clearWarnings();
            Assert.assertNull(s.getWarnings());
        }
    }

    /** Wrapper API: the statement must unwrap to the driver's ADBPreparedStatement. */
    public void testWrapper() throws SQLException {
        try (Connection c = createConnection(); PreparedStatement s = c.prepareStatement(Q1)) {
            Assert.assertTrue(s.isWrapperFor(ADBPreparedStatement.class));
            Assert.assertNotNull(s.unwrap(ADBPreparedStatement.class));
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jdbc;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.JDBCType;
import java.sql.SQLException;
import java.time.Duration;
import java.time.Period;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.junit.Assert;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
abstract class JdbcTester {
// Built-in dataverses present in every AsterixDB instance.
static final String DEFAULT_DATAVERSE_NAME = "Default";
static final String METADATA_DATAVERSE_NAME = "Metadata";
static final List<String> BUILT_IN_DATAVERSE_NAMES = Arrays.asList(DEFAULT_DATAVERSE_NAME, METADATA_DATAVERSE_NAME);
// Standard SQLSTATE for "connection does not exist / closed".
static final String SQL_STATE_CONNECTION_CLOSED = "08003";
static final char IDENTIFIER_QUOTE = '`';
// Q1: trivial single-row, single-column query returning V1.
static final int V1 = 42;
static final String Q1 = printSelect(V1);
// Q2: 9-row, two-column ordered query.
static final String Q2 = "select r x, r * 11 y from range(1, 9) r order by r";
// Q3: one column per supported data type (NULL in the middle row via nullif), used to
// exercise type mapping; the parallel arrays below describe its expected column names,
// JDBC types, ADM type names, and Java classes, index-aligned with Q3_PROJECT.
static final String Q3_PROJECT = "int8(v) c1_i1, int16(v) c2_i2, int32(v) c3_i4, int64(v) c4_i8, float(v) c5_r4, "
+ "double(v) c6_r8, 'a' || string(v) c7_s, boolean(v+2) c8_b, date_from_unix_time_in_days(v) c9_d, "
+ "time_from_unix_time_in_ms((v+3)*1000) c10_t, datetime_from_unix_time_in_secs(v) c11_dt,"
+ "get_year_month_duration(duration_from_months(v)) c12_um, "
+ "get_day_time_duration(duration_from_ms(v)) c13_ut, "
+ "duration('P'||string(v+3)||'MT'||string(v+3)||'S') c14_uu, "
+ "uuid('5c848e5c-6b6a-498f-8452-8847a295742' || string(v+3)) c15_id";
static final String Q3 = String.format("select %s from range(-1, 1) r let v=nullif(r,0)*2 order by r", Q3_PROJECT);
static String[] Q3_COLUMNS = new String[] { "c1_i1", "c2_i2", "c3_i4", "c4_i8", "c5_r4", "c6_r8", "c7_s", "c8_b",
"c9_d", "c10_t", "c11_dt", "c12_um", "c13_ut", "c14_uu", "c15_id" };
static JDBCType[] Q3_COLUMN_TYPES_JDBC = new JDBCType[] { JDBCType.TINYINT, JDBCType.SMALLINT, JDBCType.INTEGER,
JDBCType.BIGINT, JDBCType.REAL, JDBCType.DOUBLE, JDBCType.VARCHAR, JDBCType.BOOLEAN, JDBCType.DATE,
JDBCType.TIME, JDBCType.TIMESTAMP, JDBCType.OTHER, JDBCType.OTHER, JDBCType.OTHER, JDBCType.OTHER };
static String[] Q3_COLUMN_TYPES_ADB = new String[] { "int8", "int16", "int32", "int64", "float", "double", "string",
"boolean", "date", "time", "datetime", "year-month-duration", "day-time-duration", "duration", "uuid" };
static Class<?>[] Q3_COLUMN_TYPES_JAVA = new Class<?>[] { Byte.class, Short.class, Integer.class, Long.class,
Float.class, Double.class, String.class, Boolean.class, java.sql.Date.class, java.sql.Time.class,
java.sql.Timestamp.class, Period.class, Duration.class, String.class, UUID.class };
// Shared per-suite context (JDBC URL parameters); injected by JdbcDriverTest before each run.
protected JdbcTestContext testContext;
protected JdbcTester() {
}
/** Injects the shared test context; must be called before any createConnection(). */
void setTestContext(JdbcTestContext testContext) {
this.testContext = Objects.requireNonNull(testContext);
}
/** Creates the context pointing at the embedded cluster's NC API endpoint. */
static JdbcTestContext createTestContext(String host, int port) {
return new JdbcTestContext(host, port);
}
/** Opens a connection with no default dataverse. */
protected Connection createConnection() throws SQLException {
return DriverManager.getConnection(testContext.getJdbcUrl());
}
/** Opens a connection whose default dataverse is the given single-part name. */
protected Connection createConnection(String dataverseName) throws SQLException {
return createConnection(Collections.singletonList(dataverseName));
}
/** Opens a connection whose default dataverse is the given multi-part name ("a/b" form in the URL). */
protected Connection createConnection(List<String> dataverseName) throws SQLException {
return DriverManager.getConnection(testContext.getJdbcUrl(getCanonicalDataverseName(dataverseName)));
}
/**
 * Joins the parts of a (possibly multi-part) dataverse name with '/' into the canonical
 * form used in JDBC connection URLs.
 */
protected static String getCanonicalDataverseName(List<String> dataverseName) {
    return dataverseName.stream().collect(Collectors.joining("/"));
}
/** Renders a multi-part dataverse name as dot-separated backquoted identifiers, e.g. `a`.`b`. */
protected static String printDataverseName(List<String> dataverseName) {
return dataverseName.stream().map(JdbcTester::printIdentifier).collect(Collectors.joining("."));
}
/**
 * Wraps an identifier in backquotes.
 * NOTE(review): embedded backquotes are not escaped — assumes test identifiers never contain them.
 */
protected static String printIdentifier(String ident) {
return IDENTIFIER_QUOTE + ident + IDENTIFIER_QUOTE;
}
/** SQL++ CREATE DATAVERSE statement for the given name. */
protected static String printCreateDataverse(List<String> dataverseName) {
return String.format("create dataverse %s", printDataverseName(dataverseName));
}
/** SQL++ DROP DATAVERSE statement for the given name. */
protected static String printDropDataverse(List<String> dataverseName) {
return String.format("drop dataverse %s", printDataverseName(dataverseName));
}
/** CREATE DATASET with an autogenerated uuid primary key (schema-less open type). */
protected static String printCreateDataset(List<String> dataverseName, String datasetName) {
return String.format("create dataset %s.%s(_id uuid) open type primary key _id autogenerated",
printDataverseName(dataverseName), printIdentifier(datasetName));
}
/** CREATE DATASET with an explicit schema; the first {@code pkLen} fields form the primary key. */
protected static String printCreateDataset(List<String> dataverseName, String datasetName, List<String> fieldNames,
List<String> fieldTypes, int pkLen) {
return String.format("create dataset %s.%s(%s) open type primary key %s", printDataverseName(dataverseName),
printIdentifier(datasetName), printSchema(fieldNames, fieldTypes),
printIdentifierList(fieldNames.subList(0, pkLen)));
}
/**
 * CREATE VIEW with a typed schema, a not-enforced primary key on the first {@code pkLen}
 * fields, and one not-enforced foreign key clause per referenced view/dataset in fkRefs.
 */
protected static String printCreateView(List<String> dataverseName, String viewName, List<String> fieldNames,
List<String> fieldTypes, int pkLen, List<String> fkRefs, String viewQuery) {
List<String> pkFieldNames = fieldNames.subList(0, pkLen);
String pkDecl = String.format(" primary key (%s) not enforced", printIdentifierList(pkFieldNames));
// Each foreign key reuses the primary-key field list as the referencing columns.
String fkDecl =
fkRefs.stream()
.map(fkRef -> String.format("foreign key (%s) references %s not enforced",
printIdentifierList(pkFieldNames), printIdentifier(fkRef)))
.collect(Collectors.joining(" "));
return String.format("create view %s.%s(%s) default null %s %s as %s", printDataverseName(dataverseName),
printIdentifier(viewName), printSchema(fieldNames, fieldTypes), pkDecl, fkDecl, viewQuery);
}
protected static String printSchema(List<String> fieldNames, List<String> fieldTypes) {
StringBuilder schema = new StringBuilder(128);
for (int i = 0, n = fieldNames.size(); i < n; i++) {
if (i > 0) {
schema.append(',');
}
schema.append(printIdentifier(fieldNames.get(i))).append(' ').append(fieldTypes.get(i));
}
return schema.toString();
}
protected static String printIdentifierList(List<String> fieldNames) {
return fieldNames.stream().map(JdbcTester::printIdentifier).collect(Collectors.joining(","));
}
protected static String printInsert(List<String> dataverseName, String datasetName, ArrayNode values) {
return String.format("insert into %s.%s (%s)", printDataverseName(dataverseName), printIdentifier(datasetName),
values);
}
protected static String printSelect(Object... values) {
return String.format("select %s", Arrays.stream(values).map(String::valueOf).collect(Collectors.joining(",")));
}
protected static ArrayNode dataGen(String fieldName1, Object... data1) {
ObjectMapper om = new ObjectMapper();
ArrayNode values = om.createArrayNode();
for (Object v : data1) {
ObjectNode obj = om.createObjectNode();
obj.putPOJO(fieldName1, v);
values.add(obj);
}
return values;
}
protected static <T> void assertErrorOnClosed(T param, JdbcConnectionTester.JdbcRunnable<T> cmd,
String description) {
try {
cmd.run(param);
Assert.fail(String.format("Unexpected: %s succeeded on a closed %s", description,
param.getClass().getSimpleName()));
} catch (SQLException e) {
String msg = e.getMessage();
Assert.assertTrue(msg, msg.contains("closed"));
}
}
static class JdbcTestContext {
private static final String JDBC_URL_TEMPLATE = "jdbc:asterixdb://%s:%d";
private final String jdbcUrl;
private JdbcTestContext(String host, int port) {
jdbcUrl = String.format(JDBC_URL_TEMPLATE, host, port);
}
public String getJdbcUrl() {
return jdbcUrl;
}
public String getJdbcUrl(String dataverseName) {
return jdbcUrl + '/' + dataverseName;
}
}
    /** A single JDBC action on {@code param} that may throw {@link SQLException}. */
    interface JdbcRunnable<T> {
        void run(T param) throws SQLException;
    }
    /** A boolean-valued JDBC check on {@code param} that may throw {@link SQLException}. */
    interface JdbcPredicate<T> {
        boolean test(T param) throws SQLException;
    }
static class CloseablePair<K extends AutoCloseable, V extends AutoCloseable> extends Pair<K, V>
implements AutoCloseable {
CloseablePair(K first, V second) {
super(first, second);
}
@Override
public void close() throws SQLException {
try {
if (second != null) {
try {
second.close();
} catch (SQLException e) {
throw e;
} catch (Exception e) {
throw new SQLException(e);
}
}
} finally {
if (first != null) {
try {
first.close();
} catch (SQLException e) {
throw e;
} catch (Exception e) {
throw new SQLException(e);
}
}
}
}
}
}
| 6,039 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jdbc;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.JDBCType;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLType;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.asterix.jdbc.core.ADBDatabaseMetaData;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.junit.Assert;
/**
 * Tests of {@link java.sql.DatabaseMetaData} as implemented by the AsterixDB JDBC driver.
 * Dataverses are exposed as catalogs, TABLE_SCHEM is always null, datasets are reported with
 * table type TABLE and views with table type VIEW.
 */
class JdbcMetadataTester extends JdbcTester {
    // Metadata result-set column labels defined by the JDBC DatabaseMetaData contract.
    static final String TABLE_CAT = "TABLE_CAT";
    static final String TABLE_CATALOG = "TABLE_CATALOG";
    static final String TABLE_SCHEM = "TABLE_SCHEM";
    static final String TABLE_NAME = "TABLE_NAME";
    static final String TABLE_TYPE = "TABLE_TYPE";
    // TABLE_TYPE values reported by the driver: datasets map to TABLE, views to VIEW.
    static final String TABLE = "TABLE";
    static final String VIEW = "VIEW";
    static final String COLUMN_NAME = "COLUMN_NAME";
    static final String DATA_TYPE = "DATA_TYPE";
    static final String TYPE_NAME = "TYPE_NAME";
    static final String ORDINAL_POSITION = "ORDINAL_POSITION";
    static final String NULLABLE = "NULLABLE";
    static final String KEY_SEQ = "KEY_SEQ";
    // getImportedKeys()/getExportedKeys() labels: PK* = referenced side, FK* = referencing side.
    static final String PKTABLE_CAT = "PKTABLE_CAT";
    static final String PKTABLE_SCHEM = "PKTABLE_SCHEM";
    static final String PKTABLE_NAME = "PKTABLE_NAME";
    static final String PKCOLUMN_NAME = "PKCOLUMN_NAME";
    static final String FKTABLE_CAT = "FKTABLE_CAT";
    static final String FKTABLE_SCHEM = "FKTABLE_SCHEM";
    static final String FKTABLE_NAME = "FKTABLE_NAME";
    static final String FKCOLUMN_NAME = "FKCOLUMN_NAME";
    // AsterixDB type names used for the test dataset/view columns.
    static final String STRING = "string";
    static final String BIGINT = "int64";
    static final String DOUBLE = "double";
    // Column layouts shared by test datasets and views; names, types and JDBC types align
    // positionally (views reuse the dataset column types under different names).
    static final List<String> DATASET_COLUMN_NAMES = Arrays.asList("tc", "ta", "tb");
    static final List<String> VIEW_COLUMN_NAMES = Arrays.asList("vb", "vc", "va");
    static final List<String> DATASET_COLUMN_TYPES = Arrays.asList(STRING, BIGINT, DOUBLE);
    static final List<SQLType> DATASET_COLUMN_JDBC_TYPES =
            Arrays.asList(JDBCType.VARCHAR, JDBCType.BIGINT, JDBCType.DOUBLE);
    // Number of leading columns that form the primary key of each test dataset/view.
    static final int DATASET_PK_LEN = 2;
public void testLifecycle() throws SQLException {
Connection c = createConnection();
Assert.assertSame(c, c.getMetaData().getConnection());
c.close();
try {
c.getMetaData();
Assert.fail("Got metadata on a closed connection");
} catch (SQLException e) {
Assert.assertEquals(SQL_STATE_CONNECTION_CLOSED, e.getSQLState());
}
}
public void testProperties() throws SQLException {
try (Connection c = createConnection()) {
DatabaseMetaData md = c.getMetaData();
Assert.assertEquals(testContext.getJdbcUrl(), md.getURL());
Assert.assertNotNull(md.getDriverName());
Assert.assertNotNull(md.getDriverVersion());
Assert.assertNotNull(md.getDatabaseProductName());
Assert.assertNotNull(md.getDatabaseProductVersion());
Assert.assertEquals(4, md.getJDBCMajorVersion());
Assert.assertEquals(2, md.getJDBCMinorVersion());
Assert.assertTrue(md.isCatalogAtStart());
Assert.assertEquals(".", md.getCatalogSeparator());
Assert.assertEquals("`", md.getIdentifierQuoteString());
Assert.assertTrue(md.allTablesAreSelectable());
Assert.assertTrue(md.nullsAreSortedLow());
Assert.assertFalse(md.nullsAreSortedHigh());
Assert.assertFalse(md.nullsAreSortedAtStart());
Assert.assertFalse(md.nullsAreSortedAtEnd());
Assert.assertFalse(md.supportsCatalogsInTableDefinitions());
Assert.assertFalse(md.supportsCatalogsInIndexDefinitions());
Assert.assertFalse(md.supportsCatalogsInDataManipulation());
Assert.assertFalse(md.supportsSchemasInTableDefinitions());
Assert.assertFalse(md.supportsSchemasInIndexDefinitions());
Assert.assertFalse(md.supportsSchemasInDataManipulation());
Assert.assertTrue(md.supportsSubqueriesInComparisons());
Assert.assertTrue(md.supportsSubqueriesInExists());
Assert.assertTrue(md.supportsSubqueriesInIns());
Assert.assertTrue(md.supportsCorrelatedSubqueries());
Assert.assertTrue(md.supportsOrderByUnrelated());
Assert.assertTrue(md.supportsExpressionsInOrderBy());
Assert.assertTrue(md.supportsGroupBy());
Assert.assertTrue(md.supportsGroupByUnrelated());
Assert.assertTrue(md.supportsGroupByBeyondSelect());
Assert.assertTrue(md.supportsOuterJoins());
Assert.assertTrue(md.supportsMinimumSQLGrammar());
Assert.assertTrue(md.supportsTableCorrelationNames());
Assert.assertTrue(md.supportsUnionAll());
}
}
public void testGetCatalogs() throws SQLException {
try (Connection c = createConnection(); Statement s = c.createStatement()) {
DatabaseMetaData md = c.getMetaData();
try (ResultSet rs = md.getCatalogs()) {
assertColumnValues(rs, TABLE_CAT, BUILT_IN_DATAVERSE_NAMES);
}
List<List<String>> newDataverseList = new ArrayList<>();
try {
createDataverses(s, newDataverseList);
List<String> allCatalogs = new ArrayList<>(BUILT_IN_DATAVERSE_NAMES);
for (List<String> n : newDataverseList) {
allCatalogs.add(getCanonicalDataverseName(n));
}
try (ResultSet rs = md.getCatalogs()) {
assertColumnValues(rs, TABLE_CAT, allCatalogs);
}
} finally {
dropDataverses(s, newDataverseList);
}
}
}
public void testGetCatalogsResultSetLifecycle() throws SQLException {
// check that Connection.close() closes metadata ResultSet
Connection c = createConnection();
DatabaseMetaData md = c.getMetaData();
ResultSet rs = md.getCatalogs();
Assert.assertFalse(rs.isClosed());
c.close();
Assert.assertTrue(rs.isClosed());
}
    /**
     * getSchemas(): the driver reports one row per catalog (dataverse) with TABLE_SCHEM always
     * null. Covers the default dataverse, a dataverse selected via the connection URL and via
     * setCatalog(), the empty-catalog filter (must return no rows), and catalog filtering
     * after creating additional dataverses.
     */
    public void testGetSchemas() throws SQLException {
        // get schemas in the default dataverse
        try (Connection c = createConnection()) {
            DatabaseMetaData md = c.getMetaData();
            try (ResultSet rs = md.getSchemas()) {
                assertColumnValues(rs, Arrays.asList(TABLE_SCHEM, TABLE_CATALOG), Arrays
                        .asList(Collections.singletonList(null), Collections.singletonList(DEFAULT_DATAVERSE_NAME)));
            }
        }
        // get schemas in the connection's dataverse
        try (Connection c = createConnection(METADATA_DATAVERSE_NAME)) {
            DatabaseMetaData md = c.getMetaData();
            try (ResultSet rs = md.getSchemas()) {
                assertColumnValues(rs, Arrays.asList(TABLE_SCHEM, TABLE_CATALOG), Arrays
                        .asList(Collections.singletonList(null), Collections.singletonList(METADATA_DATAVERSE_NAME)));
            }
        }
        // get schemas in the connection's dataverse #2
        try (Connection c = createConnection()) {
            c.setCatalog(METADATA_DATAVERSE_NAME);
            DatabaseMetaData md = c.getMetaData();
            try (ResultSet rs = md.getSchemas()) {
                assertColumnValues(rs, Arrays.asList(TABLE_SCHEM, TABLE_CATALOG), Arrays
                        .asList(Collections.singletonList(null), Collections.singletonList(METADATA_DATAVERSE_NAME)));
            }
        }
        try (Connection c = createConnection()) {
            DatabaseMetaData md = c.getMetaData();
            // we don't have any schemas without catalogs
            try (ResultSet rs = md.getSchemas("", null)) {
                Assert.assertEquals(0, countRows(rs));
            }
        }
        // catalog filtering across newly created dataverses
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            List<List<String>> newDataverseList = new ArrayList<>();
            try {
                createDataverses(s, newDataverseList);
                List<String> allCatalogs = new ArrayList<>(BUILT_IN_DATAVERSE_NAMES);
                for (List<String> n : newDataverseList) {
                    allCatalogs.add(getCanonicalDataverseName(n));
                }
                DatabaseMetaData md = c.getMetaData();
                // empty catalog pattern matches nothing
                try (ResultSet rs = md.getSchemas("", null)) {
                    Assert.assertFalse(rs.next());
                }
                // null catalog pattern matches every catalog
                try (ResultSet rs = md.getSchemas(null, null)) {
                    assertColumnValues(rs, Arrays.asList(TABLE_SCHEM, TABLE_CATALOG),
                            Arrays.asList(Collections.nCopies(allCatalogs.size(), null), allCatalogs));
                }
                // exact-name catalog pattern matches a single catalog
                try (ResultSet rs = md.getSchemas("x", null)) {
                    assertColumnValues(rs, Arrays.asList(TABLE_SCHEM, TABLE_CATALOG),
                            Arrays.asList(Collections.singletonList(null), Collections.singletonList("x")));
                }
            } finally {
                dropDataverses(s, newDataverseList);
            }
        }
    }
public void testGetTableTypes() throws SQLException {
try (Connection c = createConnection()) {
DatabaseMetaData md = c.getMetaData();
try (ResultSet rs = md.getTableTypes()) {
assertColumnValues(rs, TABLE_TYPE, Arrays.asList(TABLE, VIEW));
}
}
}
    /**
     * getTables(): datasets are reported as TABLE and views as VIEW, ordered by catalog then
     * table type. Verifies the Metadata dataverse heuristically (row counts), then creates
     * dataverses with datasets and views and checks name-pattern, table-type, and per-catalog
     * filtering, plus the non-existent catalog/schema/table/type cases (all empty).
     */
    public void testGetTables() throws SQLException {
        try (Connection c = createConnection()) {
            DatabaseMetaData md = c.getMetaData();
            // sanity counts against the built-in Metadata dataverse (exact contents vary by version)
            try (ResultSet rs = md.getTables(METADATA_DATAVERSE_NAME, null, null, null)) {
                int n = countRows(rs);
                Assert.assertTrue(String.valueOf(n), n > 10);
            }
            try (ResultSet rs = md.getTables(METADATA_DATAVERSE_NAME, null, "Data%", null)) {
                int n = countRows(rs);
                Assert.assertTrue(String.valueOf(n), n > 2);
            }
            // we don't have any tables without catalogs
            try (ResultSet rs = md.getTables("", null, null, null)) {
                int n = countRows(rs);
                Assert.assertEquals(0, n);
            }
        }
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            List<List<String>> newDataverseList = new ArrayList<>();
            List<Pair<List<String>, String>> newDatasetList = new ArrayList<>();
            List<Pair<List<String>, String>> newViewList = new ArrayList<>();
            try {
                createDataversesDatasetsViews(s, newDataverseList, newDatasetList, newViewList);
                DatabaseMetaData md = c.getMetaData();
                List<String> expectedColumns = Arrays.asList(TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE);
                // expected-row lists are built incrementally and must stay in result-set order
                List<String> expectedTableCat = new ArrayList<>();
                List<String> expectedTableSchem = new ArrayList<>();
                List<String> expectedTableName = new ArrayList<>();
                List<String> expectedTableType = new ArrayList<>();
                // Test getTables() in all catalogs
                for (Pair<List<String>, String> p : newDatasetList) {
                    expectedTableCat.add(getCanonicalDataverseName(p.first));
                    expectedTableSchem.add(null);
                    expectedTableName.add(p.second);
                    expectedTableType.add(TABLE);
                }
                // using table name pattern
                try (ResultSet rs = md.getTables(null, null, "t%", null)) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedTableType));
                }
                // using table type
                try (ResultSet rs = md.getTables(null, null, null, new String[] { TABLE })) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedTableType),
                            JdbcMetadataTester::isMetadataCatalog);
                }
                // all tables
                for (Pair<List<String>, String> p : newViewList) {
                    expectedTableCat.add(getCanonicalDataverseName(p.first));
                    expectedTableSchem.add(null);
                    expectedTableName.add(p.second);
                    expectedTableType.add(VIEW);
                }
                try (ResultSet rs = md.getTables(null, null, null, null)) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedTableType),
                            JdbcMetadataTester::isMetadataCatalog);
                }
                try (ResultSet rs = md.getTables(null, "", null, null)) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedTableType),
                            JdbcMetadataTester::isMetadataCatalog);
                }
                try (ResultSet rs = md.getTables(null, null, null, new String[] { TABLE, VIEW })) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedTableType),
                            JdbcMetadataTester::isMetadataCatalog);
                }
                // Test getTables() in a particular catalog
                for (List<String> dvi : newDataverseList) {
                    expectedTableCat.clear();
                    expectedTableSchem.clear();
                    expectedTableName.clear();
                    expectedTableType.clear();
                    String dvic = getCanonicalDataverseName(dvi);
                    for (Pair<List<String>, String> p : newDatasetList) {
                        String dv = getCanonicalDataverseName(p.first);
                        if (dv.equals(dvic)) {
                            expectedTableCat.add(dv);
                            expectedTableSchem.add(null);
                            expectedTableName.add(p.second);
                            expectedTableType.add(TABLE);
                        }
                    }
                    // using table name pattern
                    try (ResultSet rs = md.getTables(dvic, null, "t%", null)) {
                        assertColumnValues(rs, expectedColumns, Arrays.asList(expectedTableCat, expectedTableSchem,
                                expectedTableName, expectedTableType));
                    }
                    // using table type
                    try (ResultSet rs = md.getTables(dvic, null, null, new String[] { TABLE })) {
                        assertColumnValues(rs, expectedColumns, Arrays.asList(expectedTableCat, expectedTableSchem,
                                expectedTableName, expectedTableType));
                    }
                    for (Pair<List<String>, String> p : newViewList) {
                        String dv = getCanonicalDataverseName(p.first);
                        if (dv.equals(dvic)) {
                            expectedTableCat.add(dv);
                            expectedTableSchem.add(null);
                            expectedTableName.add(p.second);
                            expectedTableType.add(VIEW);
                        }
                    }
                    try (ResultSet rs = md.getTables(dvic, null, null, null)) {
                        assertColumnValues(rs, expectedColumns, Arrays.asList(expectedTableCat, expectedTableSchem,
                                expectedTableName, expectedTableType));
                    }
                    try (ResultSet rs = md.getTables(dvic, "", null, null)) {
                        assertColumnValues(rs, expectedColumns, Arrays.asList(expectedTableCat, expectedTableSchem,
                                expectedTableName, expectedTableType));
                    }
                    try (ResultSet rs = md.getTables(dvic, null, null, new String[] { TABLE, VIEW })) {
                        assertColumnValues(rs, expectedColumns, Arrays.asList(expectedTableCat, expectedTableSchem,
                                expectedTableName, expectedTableType));
                    }
                }
                // non-existent catalog
                try (ResultSet rs = md.getTables("UNKNOWN", null, null, null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent schema
                try (ResultSet rs = md.getTables(null, "UNKNOWN", null, null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent table name
                try (ResultSet rs = md.getTables(null, null, "UNKNOWN", null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent table type
                try (ResultSet rs = md.getTables(null, null, null, new String[] { "UNKNOWN" })) {
                    Assert.assertEquals(0, countRows(rs));
                }
            } finally {
                dropDataverses(s, newDataverseList);
            }
        }
    }
    /**
     * getColumns(): every dataset/view column is reported with its JDBC type, ordinal position,
     * and nullability (PK columns are non-nullable). Checks the built-in Metadata dataverse by
     * row count, then newly created datasets/views with table/column name patterns, catalog
     * scoping, and the non-existent catalog/schema/table/column cases (all empty).
     */
    public void testGetColumns() throws SQLException {
        try (Connection c = createConnection()) {
            DatabaseMetaData md = c.getMetaData();
            // sanity counts against the built-in Metadata dataverse (exact contents vary by version)
            try (ResultSet rs = md.getColumns(METADATA_DATAVERSE_NAME, null, null, null)) {
                int n = countRows(rs);
                Assert.assertTrue(String.valueOf(n), n > 50);
            }
            try (ResultSet rs = md.getColumns(METADATA_DATAVERSE_NAME, null, "Data%", null)) {
                int n = countRows(rs);
                Assert.assertTrue(String.valueOf(n), n > 20);
            }
            // we don't have any columns without catalogs
            try (ResultSet rs = md.getColumns("", null, null, null)) {
                int n = countRows(rs);
                Assert.assertEquals(0, n);
            }
        }
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            List<List<String>> newDataverseList = new ArrayList<>();
            List<Pair<List<String>, String>> newDatasetList = new ArrayList<>();
            List<Pair<List<String>, String>> newViewList = new ArrayList<>();
            try {
                createDataversesDatasetsViews(s, newDataverseList, newDatasetList, newViewList);
                DatabaseMetaData md = c.getMetaData();
                List<String> expectedColumns = Arrays.asList(TABLE_CAT, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, DATA_TYPE,
                        TYPE_NAME, ORDINAL_POSITION, NULLABLE);
                // expected-row lists are built incrementally and must stay in result-set order
                List<String> expectedTableCat = new ArrayList<>();
                List<String> expectedTableSchem = new ArrayList<>();
                List<String> expectedTableName = new ArrayList<>();
                List<String> expectedColumnName = new ArrayList<>();
                List<Integer> expectedDataType = new ArrayList<>();
                List<String> expectedTypeName = new ArrayList<>();
                List<Integer> expectedOrdinalPosition = new ArrayList<>();
                List<Integer> expectedNullable = new ArrayList<>();
                // Test getColumns() in all catalogs
                // datasets only
                for (Pair<List<String>, String> p : newDatasetList) {
                    for (int i = 0, n = DATASET_COLUMN_NAMES.size(); i < n; i++) {
                        String columnName = DATASET_COLUMN_NAMES.get(i);
                        String columnType = DATASET_COLUMN_TYPES.get(i);
                        SQLType columnJdbcType = DATASET_COLUMN_JDBC_TYPES.get(i);
                        expectedTableCat.add(getCanonicalDataverseName(p.first));
                        expectedTableSchem.add(null);
                        expectedTableName.add(p.second);
                        expectedColumnName.add(columnName);
                        expectedDataType.add(columnJdbcType.getVendorTypeNumber());
                        expectedTypeName.add(columnType);
                        expectedOrdinalPosition.add(i + 1);
                        expectedNullable.add(
                                i < DATASET_PK_LEN ? DatabaseMetaData.columnNoNulls : DatabaseMetaData.columnNullable);
                    }
                }
                // using column name pattern ("t%" matches only dataset columns, not view columns "v*")
                try (ResultSet rs = md.getColumns(null, null, null, "t%")) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedColumnName,
                                    expectedDataType, expectedTypeName, expectedOrdinalPosition, expectedNullable));
                }
                // using table name pattern
                try (ResultSet rs = md.getColumns(null, null, "t%", null)) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedColumnName,
                                    expectedDataType, expectedTypeName, expectedOrdinalPosition, expectedNullable));
                }
                // all columns
                expectedTableCat.clear();
                expectedTableSchem.clear();
                expectedTableName.clear();
                expectedColumnName.clear();
                expectedDataType.clear();
                expectedTypeName.clear();
                expectedOrdinalPosition.clear();
                expectedNullable.clear();
                // walk datasets and views per dataverse so expectations follow result-set order
                int dsIdx = 0, vIdx = 0;
                for (List<String> dvName : newDataverseList) {
                    String dvNameCanonical = getCanonicalDataverseName(dvName);
                    for (; dsIdx < newDatasetList.size() && newDatasetList.get(dsIdx).first.equals(dvName); dsIdx++) {
                        String dsName = newDatasetList.get(dsIdx).second;
                        addExpectedColumnNamesForGetColumns(dvNameCanonical, dsName, DATASET_COLUMN_NAMES,
                                expectedTableCat, expectedTableSchem, expectedTableName, expectedColumnName,
                                expectedDataType, expectedTypeName, expectedOrdinalPosition, expectedNullable);
                    }
                    for (; vIdx < newViewList.size() && newViewList.get(vIdx).first.equals(dvName); vIdx++) {
                        String vName = newViewList.get(vIdx).second;
                        addExpectedColumnNamesForGetColumns(dvNameCanonical, vName, VIEW_COLUMN_NAMES, expectedTableCat,
                                expectedTableSchem, expectedTableName, expectedColumnName, expectedDataType,
                                expectedTypeName, expectedOrdinalPosition, expectedNullable);
                    }
                }
                try (ResultSet rs = md.getColumns(null, null, null, null)) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedColumnName,
                                    expectedDataType, expectedTypeName, expectedOrdinalPosition, expectedNullable),
                            JdbcMetadataTester::isMetadataCatalog);
                }
                try (ResultSet rs = md.getColumns(null, "", null, null)) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName, expectedColumnName,
                                    expectedDataType, expectedTypeName, expectedOrdinalPosition, expectedNullable),
                            JdbcMetadataTester::isMetadataCatalog);
                }
                // Test getColumns() in a particular catalog
                for (List<String> dvName : newDataverseList) {
                    expectedTableCat.clear();
                    expectedTableSchem.clear();
                    expectedTableName.clear();
                    expectedColumnName.clear();
                    expectedDataType.clear();
                    expectedTypeName.clear();
                    expectedOrdinalPosition.clear();
                    expectedNullable.clear();
                    String dvNameCanonical = getCanonicalDataverseName(dvName);
                    for (Pair<List<String>, String> p : newDatasetList) {
                        if (dvName.equals(p.first)) {
                            addExpectedColumnNamesForGetColumns(dvNameCanonical, p.second, DATASET_COLUMN_NAMES,
                                    expectedTableCat, expectedTableSchem, expectedTableName, expectedColumnName,
                                    expectedDataType, expectedTypeName, expectedOrdinalPosition, expectedNullable);
                        }
                    }
                    try (ResultSet rs = md.getColumns(dvNameCanonical, null, "t%", null)) {
                        assertColumnValues(rs, expectedColumns,
                                Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName,
                                        expectedColumnName, expectedDataType, expectedTypeName, expectedOrdinalPosition,
                                        expectedNullable),
                                JdbcMetadataTester::isMetadataCatalog);
                    }
                    try (ResultSet rs = md.getColumns(dvNameCanonical, null, null, "t%")) {
                        assertColumnValues(rs, expectedColumns,
                                Arrays.asList(expectedTableCat, expectedTableSchem, expectedTableName,
                                        expectedColumnName, expectedDataType, expectedTypeName, expectedOrdinalPosition,
                                        expectedNullable),
                                JdbcMetadataTester::isMetadataCatalog);
                    }
                }
                // non-existent catalog
                try (ResultSet rs = md.getColumns("UNKNOWN", null, null, null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent schema
                try (ResultSet rs = md.getColumns(null, "UNKNOWN", null, null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent table name
                try (ResultSet rs = md.getColumns(null, null, "UNKNOWN", null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent column names
                try (ResultSet rs = md.getColumns(null, null, null, "UNKNOWN")) {
                    Assert.assertEquals(0, countRows(rs));
                }
            } finally {
                dropDataverses(s, newDataverseList);
            }
        }
    }
private void addExpectedColumnNamesForGetColumns(String dvNameCanonical, String dsName, List<String> columnNames,
List<String> outTableCat, List<String> outTableSchem, List<String> outTableName, List<String> outColumnName,
List<Integer> outDataType, List<String> outTypeName, List<Integer> outOrdinalPosition,
List<Integer> outNullable) {
for (int i = 0; i < columnNames.size(); i++) {
String columnName = columnNames.get(i);
String columnType = DATASET_COLUMN_TYPES.get(i);
SQLType columnJdbcType = DATASET_COLUMN_JDBC_TYPES.get(i);
outTableCat.add(dvNameCanonical);
outTableSchem.add(null);
outTableName.add(dsName);
outColumnName.add(columnName);
outDataType.add(columnJdbcType.getVendorTypeNumber());
outTypeName.add(columnType);
outOrdinalPosition.add(i + 1);
outNullable.add(i < JdbcMetadataTester.DATASET_PK_LEN ? DatabaseMetaData.columnNoNulls
: DatabaseMetaData.columnNullable);
}
}
    /**
     * getPrimaryKeys(): PK columns are reported per table, sorted by column name, with KEY_SEQ
     * giving each column's position in the declared key. Checks the built-in Metadata dataverse
     * by row count, then each created dataset and view, plus non-existent
     * catalog/schema/table cases (all empty).
     */
    public void testGetPrimaryKeys() throws SQLException {
        try (Connection c = createConnection()) {
            DatabaseMetaData md = c.getMetaData();
            // sanity counts against the built-in Metadata dataverse (exact contents vary by version)
            try (ResultSet rs = md.getPrimaryKeys(METADATA_DATAVERSE_NAME, null, null)) {
                int n = countRows(rs);
                Assert.assertTrue(String.valueOf(n), n > 20);
            }
            try (ResultSet rs = md.getPrimaryKeys(METADATA_DATAVERSE_NAME, null, "Data%")) {
                int n = countRows(rs);
                Assert.assertTrue(String.valueOf(n), n > 4);
            }
            // we don't have any tables without catalogs
            try (ResultSet rs = md.getPrimaryKeys("", null, null)) {
                int n = countRows(rs);
                Assert.assertEquals(0, n);
            }
        }
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            List<List<String>> newDataverseList = new ArrayList<>();
            List<Pair<List<String>, String>> newDatasetList = new ArrayList<>();
            List<Pair<List<String>, String>> newViewList = new ArrayList<>();
            try {
                createDataversesDatasetsViews(s, newDataverseList, newDatasetList, newViewList);
                DatabaseMetaData md = c.getMetaData();
                List<String> expectedColumns = Arrays.asList(TABLE_CAT, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, KEY_SEQ);
                List<String> expectedTableCat = new ArrayList<>();
                List<String> expectedTableSchem = new ArrayList<>();
                List<String> expectedTableName = new ArrayList<>();
                List<String> expectedColumnName = new ArrayList<>();
                List<Integer> expectedKeySeq = new ArrayList<>();
                // Test getPrimaryKeys() for a particular dataset/view
                // (j == 0 checks the i-th dataset, j == 1 the i-th view)
                for (int i = 0, n = newDatasetList.size(); i < n; i++) {
                    for (int j = 0; j < 2; j++) {
                        Pair<List<String>, String> p = j == 0 ? newDatasetList.get(i) : newViewList.get(i);
                        List<String> columnNames = j == 0 ? DATASET_COLUMN_NAMES : VIEW_COLUMN_NAMES;
                        String dvNameCanonical = getCanonicalDataverseName(p.first);
                        String dsName = p.second;
                        expectedTableCat.clear();
                        expectedTableSchem.clear();
                        expectedTableName.clear();
                        expectedColumnName.clear();
                        expectedKeySeq.clear();
                        List<String> pkColumnNames = columnNames.subList(0, DATASET_PK_LEN);
                        addExpectedColumnNamesForGetPrimaryKeys(dvNameCanonical, dsName, pkColumnNames,
                                expectedTableCat, expectedTableSchem, expectedTableName, expectedColumnName,
                                expectedKeySeq);
                        try (ResultSet rs = md.getPrimaryKeys(dvNameCanonical, null, dsName)) {
                            assertColumnValues(rs, expectedColumns, Arrays.asList(expectedTableCat, expectedTableSchem,
                                    expectedTableName, expectedColumnName, expectedKeySeq));
                        }
                    }
                }
                // non-existent catalog
                try (ResultSet rs = md.getPrimaryKeys("UNKNOWN", null, null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent schema
                try (ResultSet rs = md.getPrimaryKeys(null, "UNKNOWN", null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent table name
                try (ResultSet rs = md.getPrimaryKeys(null, null, "UNKNOWN")) {
                    Assert.assertEquals(0, countRows(rs));
                }
            } finally {
                dropDataverses(s, newDataverseList);
            }
        }
    }
private void addExpectedColumnNamesForGetPrimaryKeys(String dvNameCanonical, String dsName,
List<String> pkColumnNames, List<String> outTableCat, List<String> outTableSchem, List<String> outTableName,
List<String> outColumnName, List<Integer> outKeySeq) {
List<String> pkColumnNamesSorted = new ArrayList<>(pkColumnNames);
Collections.sort(pkColumnNamesSorted);
for (int i = 0; i < pkColumnNames.size(); i++) {
String pkColumnName = pkColumnNamesSorted.get(i);
outTableCat.add(dvNameCanonical);
outTableSchem.add(null);
outTableName.add(dsName);
outColumnName.add(pkColumnName);
outKeySeq.add(pkColumnNames.indexOf(pkColumnName) + 1);
}
}
    /**
     * getImportedKeys(): for each view, the foreign keys it declares are reported — one row
     * group per referenced view, with PK/FK column names coinciding (the views share the key
     * columns) and KEY_SEQ numbering columns within each foreign key. Each view references all
     * earlier views in its own dataverse. Also checks non-existent catalog/schema/table (empty).
     */
    public void testGetImportedKeys() throws SQLException {
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            List<List<String>> newDataverseList = new ArrayList<>();
            List<Pair<List<String>, String>> newDatasetList = new ArrayList<>();
            List<Pair<List<String>, String>> newViewList = new ArrayList<>();
            try {
                createDataversesDatasetsViews(s, newDataverseList, newDatasetList, newViewList);
                DatabaseMetaData md = c.getMetaData();
                List<String> expectedColumns = Arrays.asList(PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, PKCOLUMN_NAME,
                        FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, FKCOLUMN_NAME, KEY_SEQ);
                List<String> expectedPKTableCat = new ArrayList<>();
                List<String> expectedPKTableSchem = new ArrayList<>();
                List<String> expectedPKTableName = new ArrayList<>();
                List<String> expectedPKColumnName = new ArrayList<>();
                List<String> expectedFKTableCat = new ArrayList<>();
                List<String> expectedFKTableSchem = new ArrayList<>();
                List<String> expectedFKTableName = new ArrayList<>();
                List<String> expectedFKColumnName = new ArrayList<>();
                List<Integer> expectedKeySeq = new ArrayList<>();
                // Test getImportedKeys() for a particular view
                for (int i = 0, n = newViewList.size(); i < n; i++) {
                    Pair<List<String>, String> p = newViewList.get(i);
                    List<String> dvName = p.first;
                    String dvNameCanonical = getCanonicalDataverseName(dvName);
                    String viewName = p.second;
                    expectedPKTableCat.clear();
                    expectedPKTableSchem.clear();
                    expectedPKTableName.clear();
                    expectedPKColumnName.clear();
                    expectedFKTableCat.clear();
                    expectedFKTableSchem.clear();
                    expectedFKTableName.clear();
                    expectedFKColumnName.clear();
                    expectedKeySeq.clear();
                    List<String> pkFkColumnNames = VIEW_COLUMN_NAMES.subList(0, DATASET_PK_LEN);
                    // the i-th view references every earlier view created in the same dataverse
                    List<String> fkRefs = IntStream.range(0, i).mapToObj(newViewList::get)
                            .filter(p2 -> p2.first.equals(dvName)).map(p2 -> p2.second).collect(Collectors.toList());
                    addExpectedColumnNamesForGetImportedKeys(dvNameCanonical, viewName, pkFkColumnNames, fkRefs,
                            expectedPKTableCat, expectedPKTableSchem, expectedPKTableName, expectedPKColumnName,
                            expectedFKTableCat, expectedFKTableSchem, expectedFKTableName, expectedFKColumnName,
                            expectedKeySeq);
                    try (ResultSet rs = md.getImportedKeys(dvNameCanonical, null, viewName)) {
                        assertColumnValues(rs, expectedColumns,
                                Arrays.asList(expectedPKTableCat, expectedPKTableSchem, expectedPKTableName,
                                        expectedPKColumnName, expectedFKTableCat, expectedFKTableSchem,
                                        expectedFKTableName, expectedFKColumnName, expectedKeySeq));
                    }
                }
                // non-existent catalog
                try (ResultSet rs = md.getImportedKeys("UNKNOWN", null, null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent schema
                try (ResultSet rs = md.getImportedKeys(null, "UNKNOWN", null)) {
                    Assert.assertEquals(0, countRows(rs));
                }
                // non-existent table name
                try (ResultSet rs = md.getImportedKeys(null, null, "UNKNOWN")) {
                    Assert.assertEquals(0, countRows(rs));
                }
            } finally {
                dropDataverses(s, newDataverseList);
            }
        }
    }
/**
 * Appends the rows expected from {@code getImportedKeys()} for table {@code dsName}.
 * For every referenced view in {@code fkRefs} one row per PK/FK column is added:
 * the referenced view is the primary-key side, {@code dsName} is the foreign-key side.
 * KEY_SEQ values are 1-based.
 */
private void addExpectedColumnNamesForGetImportedKeys(String dvNameCanonical, String dsName,
        List<String> pkFkColumnNames, List<String> fkRefs, List<String> outPKTableCat, List<String> outPKTableSchem,
        List<String> outPKTableName, List<String> outPKColumnName, List<String> outFKTableCat,
        List<String> outFKTableSchem, List<String> outFKTableName, List<String> outFKColumnName,
        List<Integer> outKeySeq) {
    for (String fkRef : fkRefs) {
        int seq = 0;
        for (String pkFkColumn : pkFkColumnNames) {
            seq++;
            outPKTableCat.add(dvNameCanonical);
            outPKTableSchem.add(null);
            outPKTableName.add(fkRef);
            outPKColumnName.add(pkFkColumn);
            outFKTableCat.add(dvNameCanonical);
            outFKTableSchem.add(null);
            outFKTableName.add(dsName);
            outFKColumnName.add(pkFkColumn);
            outKeySeq.add(seq);
        }
    }
}
/**
 * Verifies {@code DatabaseMetaData.getExportedKeys()}. For each created view the expected
 * rows are one per PK/FK column for every later-created view of the same dataverse (those
 * later views declare foreign keys referencing the earlier ones — see
 * {@code createDataversesDatasetsViews}). Unknown catalog/schema/table names must yield
 * empty result sets.
 */
public void testGetExportedKeys() throws SQLException {
    try (Connection c = createConnection(); Statement s = c.createStatement()) {
        List<List<String>> newDataverseList = new ArrayList<>();
        List<Pair<List<String>, String>> newDatasetList = new ArrayList<>();
        List<Pair<List<String>, String>> newViewList = new ArrayList<>();
        try {
            createDataversesDatasetsViews(s, newDataverseList, newDatasetList, newViewList);
            DatabaseMetaData md = c.getMetaData();
            List<String> expectedColumns = Arrays.asList(PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, PKCOLUMN_NAME,
                    FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, FKCOLUMN_NAME, KEY_SEQ);
            // Accumulators are reused across views and cleared before each check
            List<String> expectedPKTableCat = new ArrayList<>();
            List<String> expectedPKTableSchem = new ArrayList<>();
            List<String> expectedPKTableName = new ArrayList<>();
            List<String> expectedPKColumnName = new ArrayList<>();
            List<String> expectedFKTableCat = new ArrayList<>();
            List<String> expectedFKTableSchem = new ArrayList<>();
            List<String> expectedFKTableName = new ArrayList<>();
            List<String> expectedFKColumnName = new ArrayList<>();
            List<Integer> expectedKeySeq = new ArrayList<>();
            // Test getExportedKeys() for a particular view
            for (int i = 0, n = newViewList.size(); i < n; i++) {
                Pair<List<String>, String> p = newViewList.get(i);
                List<String> dvName = p.first;
                String dvNameCanonical = getCanonicalDataverseName(dvName);
                String viewName = p.second;
                expectedPKTableCat.clear();
                expectedPKTableSchem.clear();
                expectedPKTableName.clear();
                expectedPKColumnName.clear();
                expectedFKTableCat.clear();
                expectedFKTableSchem.clear();
                expectedFKTableName.clear();
                expectedFKColumnName.clear();
                expectedKeySeq.clear();
                List<String> pkFkColumnNames = VIEW_COLUMN_NAMES.subList(0, DATASET_PK_LEN);
                // Exported keys: views created AFTER this one (index > i) in the same dataverse
                List<String> fkRefs = IntStream.range(i + 1, newViewList.size()).mapToObj(newViewList::get)
                        .filter(p2 -> p2.first.equals(dvName)).map(p2 -> p2.second).collect(Collectors.toList());
                addExpectedColumnNamesForGetExportedKeys(dvNameCanonical, viewName, pkFkColumnNames, fkRefs,
                        expectedPKTableCat, expectedPKTableSchem, expectedPKTableName, expectedPKColumnName,
                        expectedFKTableCat, expectedFKTableSchem, expectedFKTableName, expectedFKColumnName,
                        expectedKeySeq);
                try (ResultSet rs = md.getExportedKeys(dvNameCanonical, null, viewName)) {
                    assertColumnValues(rs, expectedColumns,
                            Arrays.asList(expectedPKTableCat, expectedPKTableSchem, expectedPKTableName,
                                    expectedPKColumnName, expectedFKTableCat, expectedFKTableSchem,
                                    expectedFKTableName, expectedFKColumnName, expectedKeySeq));
                }
            }
            // non-existent catalog
            try (ResultSet rs = md.getExportedKeys("UNKNOWN", null, null)) {
                Assert.assertEquals(0, countRows(rs));
            }
            // non-existent schema
            try (ResultSet rs = md.getExportedKeys(null, "UNKNOWN", null)) {
                Assert.assertEquals(0, countRows(rs));
            }
            // non-existent table name
            try (ResultSet rs = md.getExportedKeys(null, null, "UNKNOWN")) {
                Assert.assertEquals(0, countRows(rs));
            }
        } finally {
            dropDataverses(s, newDataverseList);
        }
    }
}
/**
 * Appends the rows expected from {@code getExportedKeys()} for table {@code dsName}.
 * For every referencing view in {@code fkRefs} one row per PK/FK column is added:
 * {@code dsName} is the primary-key side, the referencing view is the foreign-key side.
 * KEY_SEQ values are 1-based.
 */
private void addExpectedColumnNamesForGetExportedKeys(String dvNameCanonical, String dsName,
        List<String> pkFkColumnNames, List<String> fkRefs, List<String> outPKTableCat, List<String> outPKTableSchem,
        List<String> outPKTableName, List<String> outPKColumnName, List<String> outFKTableCat,
        List<String> outFKTableSchem, List<String> outFKTableName, List<String> outFKColumnName,
        List<Integer> outKeySeq) {
    for (String fkRef : fkRefs) {
        int seq = 0;
        for (String pkFkColumn : pkFkColumnNames) {
            seq++;
            outPKTableCat.add(dvNameCanonical);
            outPKTableSchem.add(null);
            outPKTableName.add(dsName);
            outPKColumnName.add(pkFkColumn);
            outFKTableCat.add(dvNameCanonical);
            outFKTableSchem.add(null);
            outFKTableName.add(fkRef);
            outFKColumnName.add(pkFkColumn);
            outKeySeq.add(seq);
        }
    }
}
/**
 * Verifies {@code DatabaseMetaData.getCrossReference()} for each (parent view, referencing
 * view) pair within a dataverse: the result must list one row per PK/FK column. The
 * negative tests (unknown catalog/schema/table on the FK side) are executed only once,
 * for the first view that actually has referencing views ({@code testUnknown} flag).
 */
public void testGetCrossReference() throws SQLException {
    try (Connection c = createConnection(); Statement s = c.createStatement()) {
        List<List<String>> newDataverseList = new ArrayList<>();
        List<Pair<List<String>, String>> newDatasetList = new ArrayList<>();
        List<Pair<List<String>, String>> newViewList = new ArrayList<>();
        try {
            createDataversesDatasetsViews(s, newDataverseList, newDatasetList, newViewList);
            DatabaseMetaData md = c.getMetaData();
            List<String> expectedColumns = Arrays.asList(PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, PKCOLUMN_NAME,
                    FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, FKCOLUMN_NAME, KEY_SEQ);
            // Accumulators are reused and cleared before each (view, fkRef) pair is checked
            List<String> expectedPKTableCat = new ArrayList<>();
            List<String> expectedPKTableSchem = new ArrayList<>();
            List<String> expectedPKTableName = new ArrayList<>();
            List<String> expectedPKColumnName = new ArrayList<>();
            List<String> expectedFKTableCat = new ArrayList<>();
            List<String> expectedFKTableSchem = new ArrayList<>();
            List<String> expectedFKTableName = new ArrayList<>();
            List<String> expectedFKColumnName = new ArrayList<>();
            List<Integer> expectedKeySeq = new ArrayList<>();
            boolean testUnknown = true;
            // Test getCrossReference() for a particular view
            for (int i = 0, n = newViewList.size(); i < n; i++) {
                Pair<List<String>, String> p = newViewList.get(i);
                List<String> dvName = p.first;
                String dvNameCanonical = getCanonicalDataverseName(dvName);
                String viewName = p.second;
                List<String> pkFkColumnNames = VIEW_COLUMN_NAMES.subList(0, DATASET_PK_LEN);
                // Referencing views: those created after this one (index > i) in the same dataverse
                Iterator<String> fkRefIter = IntStream.range(i + 1, newViewList.size()).mapToObj(newViewList::get)
                        .filter(p2 -> p2.first.equals(dvName)).map(p2 -> p2.second).iterator();
                boolean hasFkRefs = fkRefIter.hasNext();
                while (fkRefIter.hasNext()) {
                    String fkRef = fkRefIter.next();
                    expectedPKTableCat.clear();
                    expectedPKTableSchem.clear();
                    expectedPKTableName.clear();
                    expectedPKColumnName.clear();
                    expectedFKTableCat.clear();
                    expectedFKTableSchem.clear();
                    expectedFKTableName.clear();
                    expectedFKColumnName.clear();
                    expectedKeySeq.clear();
                    addExpectedColumnNamesForGetCrossReference(dvNameCanonical, viewName, pkFkColumnNames, fkRef,
                            expectedPKTableCat, expectedPKTableSchem, expectedPKTableName, expectedPKColumnName,
                            expectedFKTableCat, expectedFKTableSchem, expectedFKTableName, expectedFKColumnName,
                            expectedKeySeq);
                    try (ResultSet rs =
                            md.getCrossReference(dvNameCanonical, null, viewName, dvNameCanonical, null, fkRef)) {
                        assertColumnValues(rs, expectedColumns,
                                Arrays.asList(expectedPKTableCat, expectedPKTableSchem, expectedPKTableName,
                                        expectedPKColumnName, expectedFKTableCat, expectedFKTableSchem,
                                        expectedFKTableName, expectedFKColumnName, expectedKeySeq));
                    }
                }
                if (testUnknown && hasFkRefs) {
                    testUnknown = false;
                    // non-existent catalog
                    try (ResultSet rs =
                            md.getCrossReference(dvNameCanonical, null, viewName, "UNKNOWN", null, "UNKNOWN")) {
                        Assert.assertEquals(0, countRows(rs));
                    }
                    // non-existent schema
                    try (ResultSet rs = md.getCrossReference(dvNameCanonical, null, viewName, dvNameCanonical,
                            "UNKNOWN", "UNKNOWN")) {
                        Assert.assertEquals(0, countRows(rs));
                    }
                    // non-existent table name
                    try (ResultSet rs = md.getCrossReference(dvNameCanonical, null, viewName, dvNameCanonical, null,
                            "UNKNOWN")) {
                        Assert.assertEquals(0, countRows(rs));
                    }
                }
            }
        } finally {
            dropDataverses(s, newDataverseList);
        }
    }
}
/**
 * Appends the rows expected from {@code getCrossReference()} for the single pair
 * (parent {@code dsName}, referencing {@code fkRef}): one row per PK/FK column,
 * with {@code dsName} on the primary-key side and {@code fkRef} on the foreign-key side.
 * KEY_SEQ values are 1-based.
 */
private void addExpectedColumnNamesForGetCrossReference(String dvNameCanonical, String dsName,
        List<String> pkFkColumnNames, String fkRef, List<String> outPKTableCat, List<String> outPKTableSchem,
        List<String> outPKTableName, List<String> outPKColumnName, List<String> outFKTableCat,
        List<String> outFKTableSchem, List<String> outFKTableName, List<String> outFKColumnName,
        List<Integer> outKeySeq) {
    int seq = 0;
    for (String pkFkColumn : pkFkColumnNames) {
        seq++;
        outPKTableCat.add(dvNameCanonical);
        outPKTableSchem.add(null);
        outPKTableName.add(dsName);
        outPKColumnName.add(pkFkColumn);
        outFKTableCat.add(dvNameCanonical);
        outFKTableSchem.add(null);
        outFKTableName.add(fkRef);
        outFKColumnName.add(pkFkColumn);
        outKeySeq.add(seq);
    }
}
/**
 * {@code DatabaseMetaData.getTypeInfo()} must report a non-trivial set of supported
 * data types (more than 10 rows).
 */
public void testGetTypeInfo() throws SQLException {
    try (Connection connection = createConnection();
            ResultSet typeInfo = connection.getMetaData().getTypeInfo()) {
        int rowCount = countRows(typeInfo);
        Assert.assertTrue(String.valueOf(rowCount), rowCount > 10);
    }
}
/** Returns {@code true} iff the current row's TABLE_CAT is the Metadata dataverse. */
private static boolean isMetadataCatalog(ResultSet rs) throws SQLException {
    String catalog = rs.getString(TABLE_CAT);
    return METADATA_DATAVERSE_NAME.equals(catalog);
}
/**
 * Creates the single-part dataverses "x" and "y" plus the two-part dataverses
 * "x/z0".."x/z9" and "y/z0".."y/z9". Every created name is appended to
 * {@code outDataverseList} (used by the caller for cleanup).
 */
private void createDataverses(Statement stmt, List<List<String>> outDataverseList) throws SQLException {
    String[] firstParts = { "x", "y" };
    for (String firstPart : firstParts) {
        List<String> singlePart = Collections.singletonList(firstPart);
        stmt.execute(printCreateDataverse(singlePart));
        outDataverseList.add(singlePart);
        for (int suffix = 0; suffix <= 9; suffix++) {
            List<String> twoPart = Arrays.asList(firstPart, "z" + suffix);
            stmt.execute(printCreateDataverse(twoPart));
            outDataverseList.add(twoPart);
        }
    }
}
/**
 * Creates four dataverses (x/z0, x/z1, y/z0, y/z1); in each one: datasets t0..t2 and
 * tabular views v0..v2. View v{i} declares foreign keys referencing the earlier views
 * v0..v{i-1} of the same dataverse. All created names are appended to the corresponding
 * output lists (used by callers to compute expected metadata and for cleanup).
 */
private void createDataversesDatasetsViews(Statement stmt, List<List<String>> outDataverseList,
        List<Pair<List<String>, String>> outDatasetList, List<Pair<List<String>, String>> outViewList)
        throws SQLException {
    for (String p1 : new String[] { "x", "y" }) {
        for (int p2i = 0; p2i < 2; p2i++) {
            String p2 = "z" + p2i;
            List<String> dv = Arrays.asList(p1, p2);
            stmt.execute(printCreateDataverse(dv));
            outDataverseList.add(dv);
            for (int i = 0; i < 3; i++) {
                // create dataset
                String datasetName = createDatasetName(i);
                stmt.execute(printCreateDataset(dv, datasetName, DATASET_COLUMN_NAMES, DATASET_COLUMN_TYPES,
                        DATASET_PK_LEN));
                outDatasetList.add(new Pair<>(dv, datasetName));
                // create tabular view; fkRefs lists the previously created views v0..v{i-1}
                String viewName = createViewName(i);
                String viewQuery = "select r va, r vb, r vc from range(1,2) r";
                List<String> fkRefs = IntStream.range(0, i).mapToObj(JdbcMetadataTester::createViewName)
                        .collect(Collectors.toList());
                stmt.execute(printCreateView(dv, viewName, VIEW_COLUMN_NAMES, DATASET_COLUMN_TYPES, DATASET_PK_LEN,
                        fkRefs, viewQuery));
                outViewList.add(new Pair<>(dv, viewName));
            }
        }
    }
}
/** Dataset naming scheme: "t" followed by the numeric id (e.g. "t0"). */
private static String createDatasetName(int id) {
    return "t".concat(Integer.toString(id));
}
/** View naming scheme: "v" followed by the numeric id (e.g. "v0"). */
private static String createViewName(int id) {
    return "v".concat(Integer.toString(id));
}
/** Cleanup helper: issues a DROP DATAVERSE statement for every name in the list. */
private void dropDataverses(Statement stmt, List<List<String>> dataverseList) throws SQLException {
    for (int i = 0, n = dataverseList.size(); i < n; i++) {
        stmt.execute(printDropDataverse(dataverseList.get(i)));
    }
}
/** Single-column convenience overload of {@code assertColumnValues}. */
private void assertColumnValues(ResultSet rs, String column, List<?> values) throws SQLException {
    List<String> columns = Collections.singletonList(column);
    List<List<?>> columnValues = Collections.singletonList(values);
    assertColumnValues(rs, columns, columnValues);
}
/** Overload of {@code assertColumnValues} that checks every row (no skip predicate). */
private void assertColumnValues(ResultSet rs, List<String> columns, List<List<?>> values) throws SQLException {
    assertColumnValues(rs, columns, values, null);
}
/**
 * Asserts that the rows of {@code rs} match the expected per-column value lists, column by
 * column. Rows for which {@code skipRowTest} returns true are ignored. The accessor used
 * depends on the expected value's runtime type (String/Integer/Long/other). Fails if the
 * result set produces more rows than expected (previously this surfaced as a bare
 * NoSuchElementException from the exhausted iterator) or fewer rows than expected.
 */
private void assertColumnValues(ResultSet rs, List<String> columns, List<List<?>> values,
        JdbcPredicate<ResultSet> skipRowTest) throws SQLException {
    int columnCount = columns.size();
    Assert.assertEquals(columnCount, values.size());
    List<Iterator<?>> valueIters = values.stream().map(List::iterator).collect(Collectors.toList());
    while (rs.next()) {
        if (skipRowTest != null && skipRowTest.test(rs)) {
            continue;
        }
        for (int i = 0; i < columnCount; i++) {
            String column = columns.get(i);
            // Fail clearly when the result set has more rows than expected values,
            // instead of throwing NoSuchElementException from the iterator
            Assert.assertTrue("unexpected extra row (column " + column + ")", valueIters.get(i).hasNext());
            Object expectedValue = valueIters.get(i).next();
            Object actualValue;
            if (expectedValue instanceof String) {
                actualValue = rs.getString(column);
            } else if (expectedValue instanceof Integer) {
                actualValue = rs.getInt(column);
            } else if (expectedValue instanceof Long) {
                actualValue = rs.getLong(column);
            } else {
                actualValue = rs.getObject(column);
            }
            if (rs.wasNull()) {
                // the accessor returned a type default for SQL NULL; expected must be null
                Assert.assertNull(expectedValue);
            } else {
                Assert.assertEquals(expectedValue, actualValue);
            }
        }
    }
    // Fewer rows than expected: report the first unconsumed expected value
    for (Iterator<?> i : valueIters) {
        if (i.hasNext()) {
            Assert.fail(String.valueOf(i.next()));
        }
    }
}
/** Drains the result set and returns the number of rows it produced. */
private int countRows(ResultSet rs) throws SQLException {
    int rowCount = 0;
    for (; rs.next(); rowCount++) {
        // intentionally empty: rows are only counted, not read
    }
    return rowCount;
}
/** DatabaseMetaData must be unwrappable to the driver's ADBDatabaseMetaData implementation. */
public void testWrapper() throws SQLException {
    try (Connection c = createConnection()) {
        DatabaseMetaData md = c.getMetaData();
        Assert.assertTrue(md.isWrapperFor(ADBDatabaseMetaData.class));
        Assert.assertNotNull(md.unwrap(ADBDatabaseMetaData.class));
    }
}
}
| 6,040 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-test/src/test/java/org/apache/asterix/test | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-test/src/test/java/org/apache/asterix/test/jdbc/JdbcStatementTester.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jdbc;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.jdbc.core.ADBStatement;
import org.junit.Assert;
/**
 * Tests for {@link java.sql.Statement}: lifecycle and auto-close behavior, statement-category
 * rules of executeQuery()/executeUpdate()/execute(), result-set retrieval via getResultSet()/
 * getMoreResults(), maxRows, the SQLWarning chain, and Wrapper unwrapping.
 */
class JdbcStatementTester extends JdbcTester {

    /** close() is idempotent and getConnection() returns the creating connection. */
    public void testLifecycle() throws SQLException {
        Connection c = createConnection();
        Statement s = c.createStatement();
        Assert.assertFalse(s.isClosed());
        Assert.assertSame(c, s.getConnection());
        s.close();
        Assert.assertTrue(s.isClosed());
        // ok to call close() on a closed statement
        s.close();
        Assert.assertTrue(s.isClosed());
        // BUGFIX: release the server connection (it was previously left open by this test)
        c.close();
    }

    /** Closing the connection must close its statements. */
    public void testAutoCloseOnConnectionClose() throws SQLException {
        Connection c = createConnection();
        // check that a statement is automatically closed when the connection is closed
        Statement s = c.createStatement();
        Assert.assertFalse(s.isClosed());
        c.close();
        Assert.assertTrue(s.isClosed());
    }

    /** With closeOnCompletion() enabled, closing the last ResultSet closes the statement. */
    public void testCloseOnCompletion() throws SQLException {
        try (Connection c = createConnection()) {
            Statement s = c.createStatement();
            Assert.assertFalse(s.isCloseOnCompletion());
            s.closeOnCompletion();
            Assert.assertTrue(s.isCloseOnCompletion());
            Assert.assertFalse(s.isClosed());
            ResultSet rs = s.executeQuery(Q1);
            Assert.assertTrue(rs.next());
            Assert.assertFalse(rs.next());
            rs.close();
            Assert.assertTrue(s.isClosed());
        }
    }

    /** executeQuery() accepts queries only; DDL and DML must fail with PROHIBITED_STATEMENT_CATEGORY. */
    public void testExecuteQuery() throws SQLException {
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            // Query -> ok
            try (ResultSet rs = s.executeQuery(Q1)) {
                Assert.assertTrue(rs.next());
                Assert.assertEquals(1, rs.getMetaData().getColumnCount());
                Assert.assertEquals(V1, rs.getInt(1));
                Assert.assertFalse(rs.next());
                Assert.assertFalse(rs.isClosed());
            }
            // DDL -> error
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testExecuteQuery");
            try {
                s.executeQuery(printCreateDataverse(dataverse));
                Assert.fail("DDL did not fail in executeQuery()");
            } catch (SQLException e) {
                String msg = e.getMessage();
                Assert.assertTrue(msg, msg.contains(ErrorCode.PROHIBITED_STATEMENT_CATEGORY.errorCode()));
            }
            // DML -> error
            String dataset = "ds1";
            s.execute(printCreateDataverse(dataverse));
            s.execute(printCreateDataset(dataverse, dataset));
            try {
                s.executeQuery(printInsert(dataverse, dataset, dataGen("x", 1, 2)));
                Assert.fail("DML did not fail in executeQuery()");
            } catch (SQLException e) {
                String msg = e.getMessage();
                Assert.assertTrue(msg, msg.contains(ErrorCode.PROHIBITED_STATEMENT_CATEGORY.errorCode()));
            }
            // Cleanup
            s.execute(printDropDataverse(dataverse));
        }
    }

    /** executeUpdate() accepts DDL (update count 0) and DML (update count 1); queries must fail. */
    public void testExecuteUpdate() throws SQLException {
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            // DDL -> ok
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testExecuteUpdate");
            int res = s.executeUpdate(printCreateDataverse(dataverse));
            Assert.assertEquals(0, res);
            String dataset = "ds1";
            res = s.executeUpdate(printCreateDataset(dataverse, dataset));
            Assert.assertEquals(0, res);
            // DML -> ok
            res = s.executeUpdate(printInsert(dataverse, dataset, dataGen("x", 1, 2)));
            // currently, DML statements always return update count = 1
            Assert.assertEquals(1, res);
            // Query -> error
            try {
                s.executeUpdate(Q1);
                Assert.fail("Query did not fail in executeUpdate()");
            } catch (SQLException e) {
                String msg = e.getMessage();
                Assert.assertTrue(msg, msg.contains("Invalid statement category"));
            }
            // Cleanup
            s.executeUpdate(printDropDataverse(dataverse));
        }
    }

    /** execute() returns true for queries (result set available) and false for DDL/DML. */
    public void testExecute() throws SQLException {
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            // Query -> ok
            boolean res = s.execute(Q1);
            Assert.assertTrue(res);
            Assert.assertEquals(-1, s.getUpdateCount());
            try (ResultSet rs = s.getResultSet()) {
                Assert.assertTrue(rs.next());
                Assert.assertEquals(1, rs.getMetaData().getColumnCount());
                Assert.assertEquals(V1, rs.getInt(1));
                Assert.assertFalse(rs.next());
                Assert.assertFalse(rs.isClosed());
            }
            // DDL -> ok
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testExecute");
            res = s.execute(printCreateDataverse(dataverse));
            Assert.assertFalse(res);
            Assert.assertEquals(0, s.getUpdateCount());
            String dataset = "ds1";
            res = s.execute(printCreateDataset(dataverse, dataset));
            Assert.assertFalse(res);
            // DML -> ok
            res = s.execute(printInsert(dataverse, dataset, dataGen("x", 1, 2)));
            Assert.assertFalse(res);
            // currently, DML statements always return update count = 1
            Assert.assertEquals(1, s.getUpdateCount());
            // Cleanup
            s.execute(printDropDataverse(dataverse));
        }
    }

    /**
     * getResultSet() returns the open result set for queries (getMoreResults() closes it
     * unless KEEP_CURRENT_RESULT is passed) and null for DDL/DML.
     */
    public void testGetResultSet() throws SQLException {
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            // Query
            boolean res = s.execute(Q1);
            Assert.assertTrue(res);
            ResultSet rs = s.getResultSet();
            Assert.assertFalse(rs.isClosed());
            Assert.assertTrue(rs.next());
            Assert.assertFalse(s.getMoreResults()); // closes current ResultSet
            Assert.assertTrue(rs.isClosed());
            res = s.execute(Q1);
            Assert.assertTrue(res);
            rs = s.getResultSet();
            Assert.assertFalse(rs.isClosed());
            Assert.assertTrue(rs.next());
            Assert.assertFalse(s.getMoreResults(Statement.KEEP_CURRENT_RESULT));
            Assert.assertFalse(rs.isClosed());
            rs.close();
            // DDL
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testGetResultSet");
            res = s.execute(printCreateDataverse(dataverse));
            Assert.assertFalse(res);
            Assert.assertNull(s.getResultSet());
            Assert.assertFalse(s.getMoreResults());
            String dataset = "ds1";
            res = s.execute(printCreateDataset(dataverse, dataset));
            Assert.assertFalse(res);
            // DML
            res = s.execute(printInsert(dataverse, dataset, dataGen("x", 1, 2)));
            Assert.assertFalse(res);
            Assert.assertNull(s.getResultSet());
            Assert.assertFalse(s.getMoreResults());
        }
    }

    /** setMaxRows() limits the number of rows a query returns. */
    public void testMaxRows() throws SQLException {
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testMaxRows");
            String dataset = "ds1";
            String field = "x";
            s.execute(printCreateDataverse(dataverse));
            s.execute(printCreateDataset(dataverse, dataset));
            s.execute(printInsert(dataverse, dataset, dataGen(field, 1, 2, 3)));
            s.setMaxRows(2);
            Assert.assertEquals(2, s.getMaxRows());
            // 3 rows inserted, but only maxRows = 2 may come back
            try (ResultSet rs = s.executeQuery(String.format("select %s from %s.%s", field,
                    printDataverseName(dataverse), printIdentifier(dataset)))) {
                Assert.assertTrue(rs.next());
                Assert.assertTrue(rs.next());
                Assert.assertFalse(rs.next());
            }
        }
    }

    /** NULL-producing casts surface as a chain of SQLWarnings on the statement. */
    public void testWarnings() throws SQLException {
        try (Connection c = createConnection();
                Statement s = c.createStatement();
                ResultSet rs = s.executeQuery("select double('x'), bigint('y')")) { // --> NULL with warning
            Assert.assertTrue(rs.next());
            rs.getDouble(1);
            Assert.assertTrue(rs.wasNull());
            rs.getLong(2);
            Assert.assertTrue(rs.wasNull());
            SQLWarning w = s.getWarnings();
            Assert.assertNotNull(w);
            String msg = w.getMessage();
            Assert.assertTrue(msg, msg.contains(ErrorCode.INVALID_FORMAT.errorCode()));
            SQLWarning w2 = w.getNextWarning();
            Assert.assertNotNull(w2);
            // BUGFIX: previously read w.getMessage() again, so the second warning's
            // message was never actually checked
            String msg2 = w2.getMessage();
            Assert.assertTrue(msg2, msg2.contains(ErrorCode.INVALID_FORMAT.errorCode()));
            Assert.assertNull(w2.getNextWarning());
            s.clearWarnings();
            Assert.assertNull(s.getWarnings());
        }
    }

    /** Statement must be unwrappable to the driver's ADBStatement implementation. */
    public void testWrapper() throws SQLException {
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            Assert.assertTrue(s.isWrapperFor(ADBStatement.class));
            Assert.assertNotNull(s.unwrap(ADBStatement.class));
        }
    }
}
| 6,041 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-test/src/test/java/org/apache/asterix/test | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-test/src/test/java/org/apache/asterix/test/jdbc/JdbcStatementParameterTester.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jdbc;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.JDBCType;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.util.Objects;
import java.util.UUID;
import java.util.function.BiPredicate;
import org.junit.Assert;
/**
 * Tests PreparedStatement parameter binding for the JDBC/SQL++ type matrix and
 * ParameterMetaData. Each Java-side value is bound both via its typed setter and via
 * setObject(), then verified to round-trip through the server.
 */
class JdbcStatementParameterTester extends JdbcTester {
    public void testParameterBinding() throws SQLException {
        // SQL++ literals of every tested type; each bound Java value must equal exactly
        // one element of this array for the WHERE clause in verifyParameterBinding() to match
        String[] sqlppValues = new String[] { "int8('10')", "int16('20')", "int32('30')", "int64('40')", "float('1.5')",
                "double('2.25')", "true", "'abc'", "date('2000-10-20')", "time('02:03:04')",
                "datetime('2000-10-20T02:03:04')", "get_year_month_duration(duration_from_months(2))",
                "get_day_time_duration(duration_from_ms(1234))", "uuid('5c848e5c-6b6a-498f-8452-8847a2957421')" };
        try (Connection c = createConnection()) {
            Byte i1 = (byte) 10;
            verifyParameterBinding(c, sqlppValues, i1, PreparedStatement::setByte, ResultSet::getByte);
            verifyParameterBinding(c, sqlppValues, i1, PreparedStatement::setObject, ResultSet::getByte);
            Short i2 = (short) 20;
            verifyParameterBinding(c, sqlppValues, i2, PreparedStatement::setShort, ResultSet::getShort);
            verifyParameterBinding(c, sqlppValues, i2, PreparedStatement::setObject, ResultSet::getShort);
            Integer i4 = 30;
            verifyParameterBinding(c, sqlppValues, i4, PreparedStatement::setInt, ResultSet::getInt);
            verifyParameterBinding(c, sqlppValues, i4, PreparedStatement::setObject, ResultSet::getInt);
            Long i8 = 40L;
            verifyParameterBinding(c, sqlppValues, i8, PreparedStatement::setLong, ResultSet::getLong);
            verifyParameterBinding(c, sqlppValues, i8, PreparedStatement::setObject, ResultSet::getLong);
            Float r4 = 1.5f;
            verifyParameterBinding(c, sqlppValues, r4, PreparedStatement::setFloat, ResultSet::getFloat);
            verifyParameterBinding(c, sqlppValues, r4, PreparedStatement::setObject, ResultSet::getFloat);
            Double r8 = 2.25;
            verifyParameterBinding(c, sqlppValues, r8, PreparedStatement::setDouble, ResultSet::getDouble);
            verifyParameterBinding(c, sqlppValues, r8, PreparedStatement::setObject, ResultSet::getDouble);
            BigDecimal dec = new BigDecimal("2.25");
            verifyParameterBinding(c, sqlppValues, dec, PreparedStatement::setBigDecimal, ResultSet::getBigDecimal);
            verifyParameterBinding(c, sqlppValues, dec, PreparedStatement::setObject, ResultSet::getBigDecimal);
            Boolean b = true;
            verifyParameterBinding(c, sqlppValues, b, PreparedStatement::setBoolean, ResultSet::getBoolean);
            verifyParameterBinding(c, sqlppValues, b, PreparedStatement::setObject, ResultSet::getBoolean);
            String s = "abc";
            verifyParameterBinding(c, sqlppValues, s, PreparedStatement::setString, ResultSet::getString);
            verifyParameterBinding(c, sqlppValues, s, PreparedStatement::setObject, ResultSet::getString);
            verifyParameterBinding(c, sqlppValues, s, PreparedStatement::setNString, ResultSet::getString);
            LocalDate date = LocalDate.of(2000, 10, 20);
            verifyParameterBinding(c, sqlppValues, java.sql.Date.valueOf(date), PreparedStatement::setDate,
                    ResultSet::getDate);
            verifyParameterBinding(c, sqlppValues, java.sql.Date.valueOf(date), PreparedStatement::setObject,
                    ResultSet::getDate);
            verifyParameterBinding(c, sqlppValues, date, PreparedStatement::setObject,
                    (rs, i) -> rs.getObject(i, LocalDate.class));
            LocalTime time = LocalTime.of(2, 3, 4);
            // java.sql.Time comparison uses a custom predicate — see sqlTimeEquals() below
            verifyParameterBinding(c, sqlppValues, java.sql.Time.valueOf(time), PreparedStatement::setTime,
                    ResultSet::getTime, JdbcStatementParameterTester::sqlTimeEquals);
            verifyParameterBinding(c, sqlppValues, java.sql.Time.valueOf(time), PreparedStatement::setObject,
                    ResultSet::getTime, JdbcStatementParameterTester::sqlTimeEquals);
            verifyParameterBinding(c, sqlppValues, time, PreparedStatement::setObject,
                    (rs, i) -> rs.getObject(i, LocalTime.class));
            LocalDateTime datetime = LocalDateTime.of(date, time);
            verifyParameterBinding(c, sqlppValues, java.sql.Timestamp.valueOf(datetime),
                    PreparedStatement::setTimestamp, ResultSet::getTimestamp);
            verifyParameterBinding(c, sqlppValues, java.sql.Timestamp.valueOf(datetime), PreparedStatement::setObject,
                    ResultSet::getTimestamp);
            verifyParameterBinding(c, sqlppValues, datetime, PreparedStatement::setObject,
                    (rs, i) -> rs.getObject(i, LocalDateTime.class));
            Period ymDuration = Period.ofMonths(2);
            verifyParameterBinding(c, sqlppValues, ymDuration, PreparedStatement::setObject,
                    (rs, i) -> rs.getObject(i, Period.class));
            Duration dtDuration = Duration.ofMillis(1234);
            verifyParameterBinding(c, sqlppValues, dtDuration, PreparedStatement::setObject,
                    (rs, i) -> rs.getObject(i, Duration.class));
            UUID uuid = UUID.fromString("5c848e5c-6b6a-498f-8452-8847a2957421");
            verifyParameterBinding(c, sqlppValues, uuid, PreparedStatement::setObject, ResultSet::getObject);
        }
    }

    /** Overload using {@link Objects#equals} to compare the bound and returned values. */
    private <T> void verifyParameterBinding(Connection c, String[] sqlppValues, T value, SetParameterByIndex<T> setter,
            JdbcResultSetTester.GetColumnByIndex<T> getter) throws SQLException {
        verifyParameterBinding(c, sqlppValues, value, setter, getter, Objects::equals);
    }

    /**
     * Binds {@code value} to both parameters of
     * {@code select ? from [literals] v where v = ?}: the WHERE clause proves the server
     * matched the bound value against the SQL++ literal of the same type, and the projected
     * parameter is read back via {@code getter} and compared with {@code cmp}.
     */
    private <T> void verifyParameterBinding(Connection c, String[] sqlppValues, T value, SetParameterByIndex<T> setter,
            JdbcResultSetTester.GetColumnByIndex<T> getter, BiPredicate<T, T> cmp) throws SQLException {
        try (PreparedStatement s =
                c.prepareStatement(String.format("select ? from [%s] v where v = ?", String.join(",", sqlppValues)))) {
            for (int i = 1; i <= 2; i++) {
                setter.set(s, i, value);
            }
            try (ResultSet rs = s.executeQuery()) {
                if (rs.next()) {
                    T outValue = getter.get(rs, 1);
                    if (!cmp.test(value, outValue)) {
                        Assert.fail(String.format("%s != %s", value, outValue));
                    }
                } else {
                    Assert.fail(String.format("Empty result (expected value '%s' was not returned)", value));
                }
            }
        }
    }

    /** Parameters are reported as IN-mode with the generic "any" type. */
    public void testParameterMetadata() throws SQLException {
        String q = "select r from range(1, 10) r where r = ? or r = ? or r = ?";
        int paramCount = 3;
        try (Connection c = createConnection(); PreparedStatement s = c.prepareStatement(q)) {
            ParameterMetaData pmd = s.getParameterMetaData();
            Assert.assertEquals(paramCount, pmd.getParameterCount());
            for (int i = 1; i <= paramCount; i++) {
                Assert.assertEquals(JDBCType.OTHER.getVendorTypeNumber().intValue(), pmd.getParameterType(i));
                Assert.assertEquals("any", pmd.getParameterTypeName(i));
                Assert.assertEquals(ParameterMetaData.parameterModeIn, pmd.getParameterMode(i));
            }
        }
    }

    /** Functional interface for a typed PreparedStatement setXxx(index, value) reference. */
    interface SetParameterByIndex<T> {
        void set(PreparedStatement s, int paramIndex, T paramValue) throws SQLException;
    }

    private static boolean sqlTimeEquals(java.sql.Time v1, java.sql.Time v2) {
        // java.sql.Time.equals() compares millis since epoch,
        // but we only want to compare time components
        return v1.toLocalTime().equals(v2.toLocalTime());
    }
}
| 6,042 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-test/src/test/java/org/apache/asterix/test | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-test/src/test/java/org/apache/asterix/test/jdbc/JdbcResultSetTester.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jdbc;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.Date;
import java.sql.JDBCType;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.time.ZoneOffset;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.asterix.jdbc.core.ADBResultSet;
import org.apache.commons.io.IOUtils;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.junit.Assert;
abstract class JdbcResultSetTester extends JdbcTester {
protected abstract CloseablePair<Statement, ResultSet> executeQuery(Connection c, String query) throws SQLException;
/** ResultSet close() is idempotent and getStatement() returns the creating statement. */
public void testLifecycle() throws SQLException {
    try (Connection c = createConnection()) {
        Pair<Statement, ResultSet> p = executeQuery(c, Q2);
        Statement s = p.getFirst();
        ResultSet rs = p.getSecond();
        Assert.assertFalse(rs.isClosed());
        Assert.assertSame(s, rs.getStatement());
        rs.close();
        Assert.assertTrue(rs.isClosed());
        // ok to call close() on a closed result set
        rs.close();
        Assert.assertTrue(rs.isClosed());
    }
}
/** Statement.close() must close the ResultSet it produced. */
public void testAutoCloseOnStatementClose() throws SQLException {
    try (Connection c = createConnection()) {
        Pair<Statement, ResultSet> p = executeQuery(c, Q2);
        Statement s = p.getFirst();
        ResultSet rs = p.getSecond();
        Assert.assertFalse(rs.isClosed());
        s.close();
        Assert.assertTrue(rs.isClosed());
    }
}
/** Connection.close() must close all of its Statements and their ResultSets. */
public void testAutoCloseOnConnectionClose() throws SQLException {
    Connection c = createConnection();
    Pair<Statement, ResultSet> p1 = executeQuery(c, Q2);
    Statement s1 = p1.getFirst();
    ResultSet rs1 = p1.getSecond();
    Assert.assertFalse(rs1.isClosed());
    Pair<Statement, ResultSet> p2 = executeQuery(c, Q2);
    Statement s2 = p2.getFirst();
    ResultSet rs2 = p2.getSecond();
    Assert.assertFalse(rs2.isClosed());
    c.close();
    Assert.assertTrue(rs1.isClosed());
    Assert.assertTrue(s1.isClosed());
    Assert.assertTrue(rs2.isClosed());
    Assert.assertTrue(s2.isClosed());
}
public void testNavigation() throws SQLException {
try (Connection c = createConnection()) {
Pair<Statement, ResultSet> p = executeQuery(c, Q2);
ResultSet rs = p.getSecond();
Assert.assertEquals(ResultSet.TYPE_FORWARD_ONLY, rs.getType());
Assert.assertEquals(ResultSet.FETCH_FORWARD, rs.getFetchDirection());
Assert.assertTrue(rs.isBeforeFirst());
Assert.assertFalse(rs.isFirst());
// Assert.assertFalse(rs.isLast()); -- Not supported
Assert.assertFalse(rs.isAfterLast());
Assert.assertEquals(0, rs.getRow());
for (int r = 1; r <= 9; r++) {
boolean next = rs.next();
Assert.assertTrue(next);
Assert.assertFalse(rs.isBeforeFirst());
Assert.assertEquals(r == 1, rs.isFirst());
Assert.assertFalse(rs.isAfterLast());
Assert.assertEquals(r, rs.getRow());
}
boolean next = rs.next();
Assert.assertFalse(next);
Assert.assertFalse(rs.isBeforeFirst());
Assert.assertFalse(rs.isFirst());
Assert.assertTrue(rs.isAfterLast());
Assert.assertEquals(0, rs.getRow());
next = rs.next();
Assert.assertFalse(next);
rs.close();
assertErrorOnClosed(rs, ResultSet::isBeforeFirst, "isBeforeFirst");
assertErrorOnClosed(rs, ResultSet::isFirst, "isFirst");
assertErrorOnClosed(rs, ResultSet::isAfterLast, "isAfterLast");
assertErrorOnClosed(rs, ResultSet::getRow, "getRow");
assertErrorOnClosed(rs, ResultSet::next, "next");
}
}
public void testColumReadBasic() throws SQLException {
String qProject = IntStream.range(1, 10).mapToObj(i -> String.format("r*10+%d as c%d", i, i))
.collect(Collectors.joining(","));
String q = String.format("select %s from range(1, 2) r order by r", qProject);
try (Connection c = createConnection(); CloseablePair<Statement, ResultSet> p = executeQuery(c, q)) {
ResultSet rs = p.getSecond();
for (int r = 1; rs.next(); r++) {
for (int col = 1; col < 10; col++) {
int expected = r * 10 + col;
Assert.assertEquals(expected, rs.getInt(col));
Assert.assertEquals(expected, rs.getInt("c" + col));
Assert.assertEquals(expected, rs.getInt(rs.findColumn("c" + col)));
}
}
}
}
public void testColumnRead() throws SQLException, IOException {
try (Connection c = createConnection(); CloseablePair<Statement, ResultSet> p = executeQuery(c, Q3)) {
ResultSet rs = p.getSecond();
for (int r = -1; rs.next(); r++) {
int v = r * 2;
verifyReadColumnOfNumericType(rs, 1, Q3_COLUMNS[0], v == 0 ? null : (byte) v);
verifyReadColumnOfNumericType(rs, 2, Q3_COLUMNS[1], v == 0 ? null : (short) v);
verifyReadColumnOfNumericType(rs, 3, Q3_COLUMNS[2], v == 0 ? null : v);
verifyReadColumnOfNumericType(rs, 4, Q3_COLUMNS[3], v == 0 ? null : (long) v);
verifyReadColumnOfNumericType(rs, 5, Q3_COLUMNS[4], v == 0 ? null : (float) v);
verifyReadColumnOfNumericType(rs, 6, Q3_COLUMNS[5], v == 0 ? null : (double) v);
verifyReadColumnOfStringType(rs, 7, Q3_COLUMNS[6], v == 0 ? null : "a" + v);
verifyReadColumnOfBooleanType(rs, 8, Q3_COLUMNS[7], v == 0 ? null : v > 0);
verifyReadColumnOfDateType(rs, 9, Q3_COLUMNS[8], v == 0 ? null : LocalDate.ofEpochDay(v));
verifyReadColumnOfTimeType(rs, 10, Q3_COLUMNS[9], v == 0 ? null : LocalTime.ofSecondOfDay(v + 3));
verifyReadColumnOfDatetimeType(rs, 11, Q3_COLUMNS[10],
v == 0 ? null : LocalDateTime.ofEpochSecond(v, 0, ZoneOffset.UTC));
verifyReadColumnOfYearMonthDurationType(rs, 12, Q3_COLUMNS[11], v == 0 ? null : Period.ofMonths(v));
verifyReadColumnOfDayTimeDurationType(rs, 13, Q3_COLUMNS[12], v == 0 ? null : Duration.ofMillis(v));
verifyReadColumnOfDurationType(rs, 14, Q3_COLUMNS[13], v == 0 ? null : Period.ofMonths(v + 3),
v == 0 ? null : Duration.ofMillis(TimeUnit.SECONDS.toMillis(v + 3)));
verifyReadColumnOfUuidType(rs, 15, Q3_COLUMNS[14],
v == 0 ? null : UUID.fromString("5c848e5c-6b6a-498f-8452-8847a295742" + (v + 3)));
}
}
}
public void testColumnMetadata() throws SQLException {
try (Connection c = createConnection(); CloseablePair<Statement, ResultSet> p = executeQuery(c, Q3)) {
ResultSet rs = p.getSecond();
int expectedColumnCount = Q3_COLUMNS.length;
ResultSetMetaData rsmd = rs.getMetaData();
Assert.assertEquals(expectedColumnCount, rsmd.getColumnCount());
for (int i = 1; i <= expectedColumnCount; i++) {
String expectedColumnName = Q3_COLUMNS[i - 1];
JDBCType expectedColumnTypeJdbc = Q3_COLUMN_TYPES_JDBC[i - 1];
String expectedColumnTypeAdb = Q3_COLUMN_TYPES_ADB[i - 1];
Class<?> expectedColumnTypeJava = Q3_COLUMN_TYPES_JAVA[i - 1];
Assert.assertEquals(i, rs.findColumn(expectedColumnName));
Assert.assertEquals(expectedColumnName, rsmd.getColumnName(i));
Assert.assertEquals(expectedColumnTypeJdbc.getVendorTypeNumber().intValue(), rsmd.getColumnType(i));
Assert.assertEquals(expectedColumnTypeAdb, rsmd.getColumnTypeName(i));
Assert.assertEquals(expectedColumnTypeJava.getName(), rsmd.getColumnClassName(i));
}
}
}
private void verifyGetColumnAsByte(ResultSet rs, int columnIndex, String columnName, Number expectedValue)
throws SQLException {
boolean expectedNull = expectedValue == null;
byte expectedByte = expectedValue == null ? 0 : expectedValue.byteValue();
byte v1 = rs.getByte(columnIndex);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedByte, v1);
byte v2 = rs.getByte(columnName);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedByte, v2);
}
private void verifyGetColumnAsShort(ResultSet rs, int columnIndex, String columnName, Number expectedValue)
throws SQLException {
boolean expectedNull = expectedValue == null;
short expectedShort = expectedValue == null ? 0 : expectedValue.shortValue();
short v1 = rs.getShort(columnIndex);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedShort, v1);
short v2 = rs.getShort(columnName);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedShort, v2);
}
private void verifyGetColumnAsInt(ResultSet rs, int columnIndex, String columnName, Number expectedValue)
throws SQLException {
boolean expectedNull = expectedValue == null;
int expectedInt = expectedValue == null ? 0 : expectedValue.intValue();
int v1 = rs.getInt(columnIndex);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedInt, v1);
int v2 = rs.getInt(columnName);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedInt, v2);
}
private void verifyGetColumnAsLong(ResultSet rs, int columnIndex, String columnName, Number expectedValue)
throws SQLException {
boolean expectedNull = expectedValue == null;
long expectedLong = expectedValue == null ? 0 : expectedValue.longValue();
long v1 = rs.getLong(columnIndex);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedLong, v1);
long v2 = rs.getLong(columnName);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedLong, v2);
}
private void verifyGetColumnAsFloat(ResultSet rs, int columnIndex, String columnName, Number expectedValue)
throws SQLException {
boolean expectedNull = expectedValue == null;
float expectedFloat = expectedValue == null ? 0f : expectedValue.floatValue();
float v1 = rs.getFloat(columnIndex);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedFloat, v1, 0);
float v2 = rs.getFloat(columnName);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedFloat, v2, 0);
}
private void verifyGetColumnAsDouble(ResultSet rs, int columnIndex, String columnName, Number expectedValue)
throws SQLException {
boolean expectedNull = expectedValue == null;
double expectedDouble = expectedValue == null ? 0d : expectedValue.doubleValue();
double v1 = rs.getDouble(columnIndex);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedDouble, v1, 0);
double v2 = rs.getDouble(columnName);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedDouble, v2, 0);
}
private void verifyGetColumnAsDecimal(ResultSet rs, int columnIndex, String columnName, Number expectedValue)
throws SQLException {
boolean expectedNull = expectedValue == null;
BigDecimal expectedDecimal = expectedValue == null ? null : new BigDecimal(expectedValue.toString());
int expectedDecimalScale = expectedValue == null ? 0 : expectedDecimal.scale();
BigDecimal v1 = rs.getBigDecimal(columnIndex, expectedDecimalScale);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedDecimal, v1);
BigDecimal v2 = rs.getBigDecimal(columnName, expectedDecimalScale);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedDecimal, v2);
}
private void verifyGetColumnAsBoolean(ResultSet rs, int columnIndex, String columnName, Boolean expectedValue)
throws SQLException {
boolean expectedNull = expectedValue == null;
boolean expectedBoolean = expectedNull ? false : expectedValue;
boolean v1 = rs.getBoolean(columnIndex);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedBoolean, v1);
boolean v2 = rs.getBoolean(columnName);
Assert.assertEquals(expectedNull, rs.wasNull());
Assert.assertEquals(expectedBoolean, v2);
}
private void verifyGetColumnAsString(ResultSet rs, int columnIndex, String columnName, String expectedValue)
throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, ResultSet::getString, ResultSet::getString);
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, ResultSet::getNString,
ResultSet::getNString);
}
private void verifyGetColumnAsObject(ResultSet rs, int columnIndex, String columnName, Object expectedValue)
throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, ResultSet::getObject, ResultSet::getObject);
}
private <V> void verifyGetColumnAsObject(ResultSet rs, int columnIndex, String columnName, V expectedValue,
Function<Object, V> valueConverter) throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, valueConverter, ResultSet::getObject,
ResultSet::getObject);
}
private <V> void verifyGetColumnAsObject(ResultSet rs, int columnIndex, String columnName, V expectedValue,
Class<V> type) throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, ResultSet::getObject, ResultSet::getObject,
type);
}
private <V, T> void verifyGetColumnAsObject(ResultSet rs, int columnIndex, String columnName, V expectedValue,
Class<T> type, Function<T, V> valueConverter) throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, valueConverter, ResultSet::getObject,
ResultSet::getObject, type);
}
private void verifyGetColumnAsSqlDate(ResultSet rs, int columnIndex, String columnName, LocalDate expectedValue)
throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, java.sql.Date::toLocalDate,
ResultSet::getDate, ResultSet::getDate);
}
private void verifyGetColumnAsSqlTime(ResultSet rs, int columnIndex, String columnName, LocalTime expectedValue)
throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, java.sql.Time::toLocalTime,
ResultSet::getTime, ResultSet::getTime);
}
private void verifyGetColumnAsSqlTimestamp(ResultSet rs, int columnIndex, String columnName,
LocalDateTime expectedValue) throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, java.sql.Timestamp::toLocalDateTime,
ResultSet::getTimestamp, ResultSet::getTimestamp);
}
private <V> void verifyGetColumnGeneric(ResultSet rs, int columnIndex, String columnName, V expectedValue,
GetColumnByIndex<V> columnByIndexAccessor, GetColumnByName<V> columnByNameAccessor) throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, Function.identity(), columnByIndexAccessor,
columnByNameAccessor);
}
private <V, T> void verifyGetColumnGeneric(ResultSet rs, int columnIndex, String columnName, V expectedValue,
Function<T, V> valueConverter, GetColumnByIndex<T> columnByIndexAccessor,
GetColumnByName<T> columnByNameAccessor) throws SQLException {
boolean expectedNull = expectedValue == null;
T v1 = columnByIndexAccessor.get(rs, columnIndex);
Assert.assertEquals(expectedNull, rs.wasNull());
if (expectedNull) {
Assert.assertNull(v1);
} else {
Assert.assertEquals(expectedValue, valueConverter.apply(v1));
}
T v2 = columnByNameAccessor.get(rs, columnName);
Assert.assertEquals(expectedNull, rs.wasNull());
if (expectedNull) {
Assert.assertNull(v2);
} else {
Assert.assertEquals(expectedValue, valueConverter.apply(v2));
}
}
private <V, P> void verifyGetColumnGeneric(ResultSet rs, int columnIndex, String columnName, V expectedValue,
GetColumnByIndexWithParam<V, P> columnByIndexAccessor, GetColumnByNameWithParam<V, P> columnByNameAccessor,
P accessorParamValue) throws SQLException {
verifyGetColumnGeneric(rs, columnIndex, columnName, expectedValue, Function.identity(), columnByIndexAccessor,
columnByNameAccessor, accessorParamValue);
}
private <V, T, P> void verifyGetColumnGeneric(ResultSet rs, int columnIndex, String columnName, V expectedValue,
Function<T, V> valueConverter, GetColumnByIndexWithParam<T, P> columnByIndexAccessor,
GetColumnByNameWithParam<T, P> columnByNameAccessor, P accessorParamValue) throws SQLException {
boolean expectedNull = expectedValue == null;
T v1 = columnByIndexAccessor.get(rs, columnIndex, accessorParamValue);
Assert.assertEquals(expectedNull, rs.wasNull());
if (expectedNull) {
Assert.assertNull(v1);
} else {
Assert.assertEquals(expectedValue, valueConverter.apply(v1));
}
T v2 = columnByNameAccessor.get(rs, columnName, accessorParamValue);
Assert.assertEquals(expectedNull, rs.wasNull());
if (expectedNull) {
Assert.assertNull(v2);
} else {
Assert.assertEquals(expectedValue, valueConverter.apply(v2));
}
}
private void verifyGetColumnAsCharacterStream(ResultSet rs, int columnIndex, String columnName,
char[] expectedValue, GetColumnByIndex<Reader> columnByIndexAccessor,
GetColumnByName<Reader> columnByNameAccessor) throws SQLException, IOException {
boolean expectedNull = expectedValue == null;
try (Reader s1 = columnByIndexAccessor.get(rs, columnIndex)) {
Assert.assertEquals(expectedNull, rs.wasNull());
if (expectedNull) {
Assert.assertNull(s1);
} else {
Assert.assertArrayEquals(expectedValue, IOUtils.toCharArray(s1));
}
}
try (Reader s2 = columnByNameAccessor.get(rs, columnName)) {
Assert.assertEquals(expectedNull, rs.wasNull());
if (expectedNull) {
Assert.assertNull(s2);
} else {
Assert.assertArrayEquals(expectedValue, IOUtils.toCharArray(s2));
}
}
}
private void verifyGetColumnAsBinaryStream(ResultSet rs, int columnIndex, String columnName, byte[] expectedValue,
GetColumnByIndex<InputStream> columnByIndexAccessor, GetColumnByName<InputStream> columnByNameAccessor)
throws SQLException, IOException {
boolean expectedNull = expectedValue == null;
try (InputStream s1 = columnByIndexAccessor.get(rs, columnIndex)) {
Assert.assertEquals(expectedNull, rs.wasNull());
if (expectedNull) {
Assert.assertNull(s1);
} else {
Assert.assertArrayEquals(expectedValue, IOUtils.toByteArray(s1));
}
}
try (InputStream s2 = columnByNameAccessor.get(rs, columnName)) {
Assert.assertEquals(expectedNull, rs.wasNull());
if (expectedNull) {
Assert.assertNull(s2);
} else {
Assert.assertArrayEquals(expectedValue, IOUtils.toByteArray(s2));
}
}
}
private void verifyReadColumnOfNumericType(ResultSet rs, int columnIndex, String columnName,
Number expectedNumericValue) throws SQLException {
String expectedStringValue = expectedNumericValue == null ? null : expectedNumericValue.toString();
Byte expectedByteValue = expectedNumericValue == null ? null : expectedNumericValue.byteValue();
Short expectedShortValue = expectedNumericValue == null ? null : expectedNumericValue.shortValue();
Integer expectedIntValue = expectedNumericValue == null ? null : expectedNumericValue.intValue();
Long expectedLongValue = expectedNumericValue == null ? null : expectedNumericValue.longValue();
Float expectedFloatValue = expectedNumericValue == null ? null : expectedNumericValue.floatValue();
Double expectedDoubleValue = expectedNumericValue == null ? null : expectedNumericValue.doubleValue();
BigDecimal expectedDecimalValue =
expectedNumericValue == null ? null : new BigDecimal(expectedStringValue.replace(".0", ""));
Boolean expectedBooleanValue = toBoolean(expectedNumericValue);
verifyGetColumnAsByte(rs, columnIndex, columnName, expectedNumericValue);
verifyGetColumnAsShort(rs, columnIndex, columnName, expectedNumericValue);
verifyGetColumnAsInt(rs, columnIndex, columnName, expectedNumericValue);
verifyGetColumnAsLong(rs, columnIndex, columnName, expectedNumericValue);
verifyGetColumnAsFloat(rs, columnIndex, columnName, expectedNumericValue);
verifyGetColumnAsDouble(rs, columnIndex, columnName, expectedNumericValue);
verifyGetColumnAsDecimal(rs, columnIndex, columnName, expectedNumericValue);
verifyGetColumnAsBoolean(rs, columnIndex, columnName, expectedBooleanValue);
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedNumericValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedByteValue, Byte.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedShortValue, Short.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedIntValue, Integer.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedLongValue, Long.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedFloatValue, Float.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDoubleValue, Double.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDecimalValue, BigDecimal.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedBooleanValue, Boolean.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue, String.class);
}
private void verifyReadColumnOfStringType(ResultSet rs, int columnIndex, String columnName,
String expectedStringValue) throws SQLException, IOException {
char[] expectedCharArray = expectedStringValue == null ? null : expectedStringValue.toCharArray();
byte[] expectedUtf8Array =
expectedStringValue == null ? null : expectedStringValue.getBytes(StandardCharsets.UTF_8);
byte[] expectedUtf16Array =
expectedStringValue == null ? null : expectedStringValue.getBytes(StandardCharsets.UTF_16);
byte[] expectedAsciiArray =
expectedStringValue == null ? null : expectedStringValue.getBytes(StandardCharsets.US_ASCII);
verifyGetColumnAsCharacterStream(rs, columnIndex, columnName, expectedCharArray, ResultSet::getCharacterStream,
ResultSet::getCharacterStream);
verifyGetColumnAsCharacterStream(rs, columnIndex, columnName, expectedCharArray, ResultSet::getNCharacterStream,
ResultSet::getNCharacterStream);
verifyGetColumnAsBinaryStream(rs, columnIndex, columnName, expectedUtf8Array, ResultSet::getBinaryStream,
ResultSet::getBinaryStream);
verifyGetColumnAsBinaryStream(rs, columnIndex, columnName, expectedUtf16Array, ResultSet::getUnicodeStream,
ResultSet::getUnicodeStream);
verifyGetColumnAsBinaryStream(rs, columnIndex, columnName, expectedAsciiArray, ResultSet::getAsciiStream,
ResultSet::getAsciiStream);
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue, String.class);
}
private void verifyReadColumnOfBooleanType(ResultSet rs, int columnIndex, String columnName,
Boolean expectedBooleanValue) throws SQLException {
Number expectedNumberValue = expectedBooleanValue == null ? null : expectedBooleanValue ? 1 : 0;
String expectedStringValue = expectedBooleanValue == null ? null : Boolean.toString(expectedBooleanValue);
verifyGetColumnAsBoolean(rs, columnIndex, columnName, expectedBooleanValue);
verifyGetColumnAsByte(rs, columnIndex, columnName, expectedNumberValue);
verifyGetColumnAsShort(rs, columnIndex, columnName, expectedNumberValue);
verifyGetColumnAsInt(rs, columnIndex, columnName, expectedNumberValue);
verifyGetColumnAsLong(rs, columnIndex, columnName, expectedNumberValue);
verifyGetColumnAsFloat(rs, columnIndex, columnName, expectedNumberValue);
verifyGetColumnAsDouble(rs, columnIndex, columnName, expectedNumberValue);
verifyGetColumnAsDecimal(rs, columnIndex, columnName, expectedNumberValue);
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedBooleanValue);
}
private void verifyReadColumnOfDateType(ResultSet rs, int columnIndex, String columnName,
LocalDate expectedDateValue) throws SQLException {
LocalDateTime expectedDateTimeValue = expectedDateValue == null ? null : expectedDateValue.atStartOfDay();
String expectedStringValue = expectedDateValue == null ? null : expectedDateValue.toString();
verifyGetColumnAsSqlDate(rs, columnIndex, columnName, expectedDateValue);
verifyGetColumnAsSqlTimestamp(rs, columnIndex, columnName, expectedDateTimeValue);
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateValue, v -> ((java.sql.Date) v).toLocalDate());
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateValue, java.sql.Date.class, Date::toLocalDate);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateTimeValue, java.sql.Timestamp.class,
Timestamp::toLocalDateTime);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateValue, LocalDate.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateTimeValue, LocalDateTime.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue, String.class);
}
private void verifyReadColumnOfTimeType(ResultSet rs, int columnIndex, String columnName,
LocalTime expectedTimeValue) throws SQLException {
String expectedStringValue = expectedTimeValue == null ? null : expectedTimeValue.toString();
verifyGetColumnAsSqlTime(rs, columnIndex, columnName, expectedTimeValue);
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedTimeValue, v -> ((java.sql.Time) v).toLocalTime());
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedTimeValue, java.sql.Time.class,
java.sql.Time::toLocalTime);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedTimeValue, LocalTime.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue, String.class);
}
private void verifyReadColumnOfDatetimeType(ResultSet rs, int columnIndex, String columnName,
LocalDateTime expectedDateTimeValue) throws SQLException {
LocalDate expectedDateValue = expectedDateTimeValue == null ? null : expectedDateTimeValue.toLocalDate();
LocalTime expectedTimeValue = expectedDateTimeValue == null ? null : expectedDateTimeValue.toLocalTime();
String expectedStringValue = expectedDateTimeValue == null ? null : expectedDateTimeValue.toString();
verifyGetColumnAsSqlTimestamp(rs, columnIndex, columnName, expectedDateTimeValue);
verifyGetColumnAsSqlDate(rs, columnIndex, columnName, expectedDateValue);
verifyGetColumnAsSqlTime(rs, columnIndex, columnName, expectedTimeValue);
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateTimeValue,
v -> ((java.sql.Timestamp) v).toLocalDateTime());
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateTimeValue, java.sql.Timestamp.class,
java.sql.Timestamp::toLocalDateTime);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateValue, java.sql.Date.class,
java.sql.Date::toLocalDate);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedTimeValue, java.sql.Time.class,
java.sql.Time::toLocalTime);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateTimeValue, LocalDateTime.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDateValue, LocalDate.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedTimeValue, LocalTime.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue, String.class);
}
private void verifyReadColumnOfYearMonthDurationType(ResultSet rs, int columnIndex, String columnName,
Period expectedPeriodValue) throws SQLException {
String expectedStringValue = expectedPeriodValue == null ? null : expectedPeriodValue.toString();
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedPeriodValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedPeriodValue, Period.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue, String.class);
}
private void verifyReadColumnOfDayTimeDurationType(ResultSet rs, int columnIndex, String columnName,
Duration expectedDurationValue) throws SQLException {
String expectedStringValue = expectedDurationValue == null ? null : expectedDurationValue.toString();
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDurationValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDurationValue, Duration.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue, String.class);
}
private void verifyReadColumnOfDurationType(ResultSet rs, int columnIndex, String columnName,
Period expectedPeriodValue, Duration expectedDurationValue) throws SQLException {
String expectedStringValue = expectedPeriodValue == null && expectedDurationValue == null ? null
: expectedPeriodValue + String.valueOf(expectedDurationValue).substring(1);
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedPeriodValue, Period.class);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedDurationValue, Duration.class);
}
private void verifyReadColumnOfUuidType(ResultSet rs, int columnIndex, String columnName, UUID expectedUuidValue)
throws SQLException {
String expectedStringValue = expectedUuidValue == null ? null : expectedUuidValue.toString();
verifyGetColumnAsString(rs, columnIndex, columnName, expectedStringValue);
verifyGetColumnAsObject(rs, columnIndex, columnName, expectedUuidValue);
}
public void testWrapper() throws SQLException {
try (Connection c = createConnection(); CloseablePair<Statement, ResultSet> p = executeQuery(c, Q2)) {
ResultSet rs = p.getSecond();
Assert.assertTrue(rs.isWrapperFor(ADBResultSet.class));
Assert.assertNotNull(rs.unwrap(ADBResultSet.class));
}
}
interface GetColumnByIndex<R> {
R get(ResultSet rs, int columnIndex) throws SQLException;
}
interface GetColumnByIndexWithParam<R, T> {
R get(ResultSet rs, int columnIndex, T param) throws SQLException;
}
interface GetColumnByName<R> {
R get(ResultSet rs, String columnName) throws SQLException;
}
interface GetColumnByNameWithParam<R, T> {
R get(ResultSet rs, String columnName, T param) throws SQLException;
}
static Boolean toBoolean(Number v) {
if (v == null) {
return null;
}
switch (v.toString()) {
case "0":
case "0.0":
return false;
default:
return true;
}
}
static class JdbcPreparedStatementResultSetTester extends JdbcResultSetTester {
@Override
protected CloseablePair<Statement, ResultSet> executeQuery(Connection c, String query) throws SQLException {
PreparedStatement s = c.prepareStatement(query);
try {
ResultSet rs = s.executeQuery();
return new CloseablePair<>(s, rs);
} catch (SQLException e) {
s.close();
throw e;
}
}
}
static class JdbcStatementResultSetTester extends JdbcResultSetTester {
@Override
protected CloseablePair<Statement, ResultSet> executeQuery(Connection c, String query) throws SQLException {
Statement s = c.createStatement();
try {
ResultSet rs = s.executeQuery(query);
return new CloseablePair<>(s, rs);
} catch (SQLException e) {
s.close();
throw e;
}
}
}
}
| 6,043 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-test/src/test/java/org/apache/asterix/test | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-test/src/test/java/org/apache/asterix/test/jdbc/JdbcConnectionTester.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jdbc;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import org.apache.asterix.jdbc.Driver;
import org.apache.asterix.jdbc.core.ADBConnection;
import org.junit.Assert;
/**
 * Exercises {@link java.sql.Connection} behavior of the AsterixDB JDBC driver:
 * driver/DriverManager connection establishment, lifecycle and idempotent
 * close, post-close error behavior, catalog/schema mapping to dataverses,
 * the (ignored) read-only hint, and the {@code Wrapper} contract.
 * <p>
 * NOTE: {@code createConnection()}, {@code testContext},
 * {@code assertErrorOnClosed()} and the {@code print*} SQL builders are
 * presumably inherited from the base class {@code JdbcTester} (not visible in
 * this file chunk).
 */
class JdbcConnectionTester extends JdbcTester {

    /** Connections must be obtainable through every DriverManager.getConnection overload. */
    public void testGetConnectionViaDriverManager() throws SQLException {
        DriverManager.getConnection(testContext.getJdbcUrl()).close();
        DriverManager.getConnection(testContext.getJdbcUrl(), null).close();
        DriverManager.getConnection(testContext.getJdbcUrl(), new Properties()).close();
        DriverManager.getConnection(testContext.getJdbcUrl(), null, null).close();
    }

    /** Connections must also be obtainable directly from the Driver instance. */
    public void testGetConnectionDirect() throws SQLException {
        Driver driver = new Driver();
        driver.connect(testContext.getJdbcUrl(), null).close();
        driver.connect(testContext.getJdbcUrl(), new Properties()).close();
    }

    /**
     * Verifies close() is effective and idempotent, that isValid() is safe on a
     * closed connection, and that other methods throw after close.
     */
    public void testLifecycle() throws SQLException {
        Connection c = createConnection();
        Assert.assertNull(c.getWarnings());
        Assert.assertTrue(c.isValid(/* timeout in seconds */ 30));
        Assert.assertFalse(c.isClosed());
        c.close();
        Assert.assertTrue(c.isClosed());
        // ok to call close() on a closed connection
        c.close();
        Assert.assertTrue(c.isClosed());
        // ok to call isValid() on a closed connection
        Assert.assertFalse(c.isValid(0));
        // errors on a closed connection
        assertErrorOnClosed(c, Connection::clearWarnings, "clearWarnings");
        assertErrorOnClosed(c, Connection::createStatement, "createStatement");
        assertErrorOnClosed(c, Connection::getAutoCommit, "getAutoCommit");
        assertErrorOnClosed(c, Connection::getCatalog, "getCatalog");
        assertErrorOnClosed(c, Connection::getClientInfo, "getClientInfo");
        assertErrorOnClosed(c, Connection::getHoldability, "getHoldability");
        // label fixed to match the method name (was "getMetadata")
        assertErrorOnClosed(c, Connection::getMetaData, "getMetaData");
        assertErrorOnClosed(c, Connection::getSchema, "getSchema");
        assertErrorOnClosed(c, Connection::getTransactionIsolation, "getTransactionIsolation");
        assertErrorOnClosed(c, Connection::getWarnings, "getWarnings");
        assertErrorOnClosed(c, Connection::getTypeMap, "getTypeMap");
        assertErrorOnClosed(c, Connection::isReadOnly, "isReadOnly");
        assertErrorOnClosed(c, ci -> ci.prepareStatement("select 1"), "prepareStatement");
    }

    /**
     * Verifies that the connection's catalog reflects the dataverse it was opened
     * against (schema is always null), including for a freshly created dataverse.
     */
    public void testCatalogSchema() throws SQLException {
        try (Connection c = createConnection()) {
            Assert.assertEquals(DEFAULT_DATAVERSE_NAME, c.getCatalog());
            Assert.assertNull(c.getSchema());
        }
        try (Connection c = createConnection(METADATA_DATAVERSE_NAME)) {
            Assert.assertEquals(METADATA_DATAVERSE_NAME, c.getCatalog());
            Assert.assertNull(c.getSchema());
        }
        try (Connection c = createConnection(); Statement s = c.createStatement()) {
            List<String> dataverse = Arrays.asList(getClass().getSimpleName(), "testCatalogSchema");
            String dvCanon = getCanonicalDataverseName(dataverse);
            String dataset = "ds1";
            // create a scratch dataverse with one 3-row dataset
            s.execute(printCreateDataverse(dataverse));
            s.execute(printCreateDataset(dataverse, dataset));
            s.execute(printInsert(dataverse, dataset, dataGen("x", 1, 2, 3)));
            try (Connection c2 = createConnection(dvCanon); Statement s2 = c2.createStatement()) {
                Assert.assertEquals(dvCanon, c2.getCatalog());
                // BUGFIX: was c.getSchema() (the outer connection), leaving the
                // new connection's schema untested
                Assert.assertNull(c2.getSchema());
                try (ResultSet rs2 =
                        s2.executeQuery(String.format("select count(*) from %s", printIdentifier(dataset)))) {
                    Assert.assertTrue(rs2.next());
                    Assert.assertEquals(3, rs2.getInt(1));
                }
            } finally {
                // always drop the scratch dataverse, even if assertions fail
                s.execute(printDropDataverse(dataverse));
            }
        }
    }

    // Connection.setReadOnly() hint is currently ignored
    // Connection.isReadOnly() always returns 'false'
    public void testReadOnlyMode() throws SQLException {
        try (Connection c = createConnection()) {
            Assert.assertFalse(c.isReadOnly());
            c.setReadOnly(true);
            Assert.assertFalse(c.isReadOnly());
        }
    }

    /** Verifies the java.sql.Wrapper contract: the connection unwraps to ADBConnection. */
    public void testWrapper() throws SQLException {
        try (Connection c = createConnection()) {
            Assert.assertTrue(c.isWrapperFor(ADBConnection.class));
            Assert.assertNotNull(c.unwrap(ADBConnection.class));
        }
    }
}
| 6,044 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBDriverProperty.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
/**
 * Describes one driver configuration property: the name under which it appears
 * on the JDBC URL / Properties object, how its textual value is parsed, its
 * default value, and whether it is user-visible or internal ("hidden").
 */
public interface ADBDriverProperty {

    String getPropertyName();

    Function<String, ?> getValueParser();

    Object getDefaultValue();

    boolean isHidden();

    /** Properties understood by every driver implementation. */
    enum Common implements ADBDriverProperty {
        USER("user", Function.identity(), null, false),
        PASSWORD("password", Function.identity(), null, false),
        CONNECT_TIMEOUT("connectTimeout", Integer::parseInt, null, false),
        SOCKET_TIMEOUT("socketTimeout", Integer::parseInt, null, false),
        MAX_WARNINGS("maxWarnings", Integer::parseInt, 10, false),
        CATALOG_DATAVERSE_MODE("catalogDataverseMode", Integer::parseInt, 1, false), // 1 -> CATALOG, 2 -> CATALOG_SCHEMA
        CATALOG_INCLUDES_SCHEMALESS("catalogIncludesSchemaless", Boolean::parseBoolean, false, false),
        SQL_COMPAT_MODE("sqlCompatMode", Boolean::parseBoolean, true, false), // whether user statements are executed in 'SQL-compat' mode
        SSL("ssl", Boolean::parseBoolean, false, false),
        // Hidden properties (internal / testing use)
        MIN_DRIVER_VERSION("minDriverVersion", Common::parseMinVersion, null, true),
        MIN_DATABASE_VERSION("minDatabaseVersion", Common::parseMinVersion, null, true),
        ACTIVE_REQUESTS_PATH("activeRequestsPath", Function.identity(), null, true);

        private final String propertyName;
        private final Function<String, ?> valueParser;
        private final Object defaultValue;
        private final boolean isHidden;

        Common(String name, Function<String, ?> parser, Object defaultValue, boolean hidden) {
            this.propertyName = Objects.requireNonNull(name);
            this.valueParser = Objects.requireNonNull(parser);
            this.defaultValue = defaultValue;
            this.isHidden = hidden;
        }

        @Override
        public String getPropertyName() {
            return propertyName;
        }

        @Override
        public Function<String, ?> getValueParser() {
            return valueParser;
        }

        @Override
        public Object getDefaultValue() {
            return defaultValue;
        }

        @Override
        public boolean isHidden() {
            return isHidden;
        }

        @Override
        public String toString() {
            return getPropertyName();
        }

        /** Returns the configured value for this property, falling back to its default. */
        public Object fetchPropertyValue(Map<ADBDriverProperty, Object> properties) {
            return properties.getOrDefault(this, defaultValue);
        }

        /**
         * Parses a "major" or "major.minor" version string into a product version.
         * Throws IllegalArgumentException for any other shape, NumberFormatException
         * for non-numeric components.
         */
        public static ADBProductVersion parseMinVersion(String text) {
            String[] segments = text.split("\\.");
            if (segments.length != 1 && segments.length != 2) {
                throw new IllegalArgumentException(text);
            }
            // minor component is parsed first, matching the historical evaluation order
            int minor = segments.length == 2 ? Integer.parseInt(segments[1]) : 0;
            int major = Integer.parseInt(segments[0]);
            return new ADBProductVersion(null, text, major, minor);
        }
    }

    /** How dataverses are mapped onto the JDBC catalog/schema hierarchy. */
    enum CatalogDataverseMode {
        CATALOG,
        CATALOG_SCHEMA;

        /** Decodes the numeric form used by {@link Common#CATALOG_DATAVERSE_MODE}. */
        static CatalogDataverseMode valueOf(int n) {
            if (n == 1) {
                return CATALOG;
            }
            if (n == 2) {
                return CATALOG_SCHEMA;
            }
            throw new IllegalArgumentException(String.valueOf(n));
        }
    }
}
| 6,045 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBErrorReporter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.io.IOException;
import java.net.URISyntaxException;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLInvalidAuthorizationSpecException;
import java.sql.SQLNonTransientConnectionException;
import java.sql.SQLTimeoutException;
import java.sql.SQLTransientConnectionException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import com.fasterxml.jackson.core.JsonProcessingException;
/**
 * Central factory for every SQLException the driver raises. Funnelling error
 * construction through one class keeps messages and SQLSTATE codes consistent;
 * subclasses may override {@link #isTimeoutConnectionError(IOException)} and
 * {@link #isTransientConnectionError(IOException)} to classify
 * transport-specific IOExceptions.
 */
public class ADBErrorReporter {

    /** Error for a method invoked on a closed JDBC object (no SQLSTATE attached). */
    public SQLException errorObjectClosed(Class<?> jdbcInterface) {
        return new SQLException(String.format("%s is closed", jdbcInterface.getSimpleName()));
    }

    /** Error for a method invoked on a closed JDBC object, with an explicit SQLSTATE. */
    public SQLException errorObjectClosed(Class<?> jdbcInterface, SQLState sqlState) {
        return new SQLException(String.format("%s is closed", jdbcInterface.getSimpleName()), sqlState.code);
    }

    public SQLFeatureNotSupportedException errorMethodNotSupported(Class<?> jdbcInterface, String methodName) {
        return new SQLFeatureNotSupportedException(
                String.format("Method %s.%s() is not supported", jdbcInterface.getName(), methodName));
    }

    /** Variant of the "not supported" error for the ClientInfo API, which has its own exception type. */
    public SQLClientInfoException errorClientInfoMethodNotSupported(Class<?> jdbcInterface, String methodName) {
        return new SQLClientInfoException(
                String.format("Method %s.%s() is not supported", jdbcInterface.getName(), methodName),
                Collections.emptyMap());
    }

    public SQLException errorParameterNotSupported(String parameterName) {
        return new SQLException(String.format("Unsupported parameter %s", parameterName));
    }

    public String warningParameterNotSupported(String parameterName) {
        return String.format("Unsupported parameter %s", parameterName);
    }

    public SQLException errorParameterValueNotSupported(String parameterName) {
        return new SQLException(String.format("Unsupported or invalid value of %s parameter", parameterName));
    }

    public String warningParameterValueNotSupported(String parameterName) {
        return String.format("Ignored unsupported or invalid value of %s parameter", parameterName);
    }

    public SQLException errorUnexpectedDriverVersion(ADBProductVersion version, ADBProductVersion minExpectedVersion) {
        return new SQLException(
                String.format("Unexpected driver version %s. Expected at least %s.%s", version.getProductVersion(),
                        minExpectedVersion.getMajorVersion(), minExpectedVersion.getMinorVersion()));
    }

    public SQLException errorUnexpectedDatabaseVersion(ADBProductVersion version,
            ADBProductVersion minExpectedVersion) {
        return new SQLException(
                String.format("Unexpected database version %s. Expected at least %s.%s", version.getProductVersion(),
                        minExpectedVersion.getMajorVersion(), minExpectedVersion.getMinorVersion()));
    }

    public SQLException errorIncompatibleMode(String mode) {
        return new SQLException(String.format("Operation cannot be performed in %s mode", mode));
    }

    public SQLException errorInProtocol() {
        return new SQLNonTransientConnectionException("Protocol error", SQLState.CONNECTION_FAILURE.code);
    }

    public SQLException errorInProtocol(String badValue) {
        return new SQLNonTransientConnectionException(String.format("Protocol error. Unexpected %s", badValue),
                SQLState.CONNECTION_FAILURE.code);
    }

    public SQLException errorInProtocol(JsonProcessingException e) {
        return new SQLNonTransientConnectionException(String.format("Protocol error. %s", getMessage(e)),
                SQLState.CONNECTION_FAILURE.code, e);
    }

    public SQLException errorInConnection(String badValue) {
        return new SQLNonTransientConnectionException(String.format("Connection error. Unexpected %s", badValue),
                SQLState.CONNECTION_FAILURE.code);
    }

    /**
     * Maps an IOException from the transport to the appropriate SQLException
     * subtype: timeout, transient connection failure, or non-transient failure.
     */
    public SQLException errorInConnection(IOException e) {
        String message = String.format("Connection error. %s", getMessage(e));
        if (isTimeoutConnectionError(e)) {
            return errorTimeout(message, e);
        }
        if (isTransientConnectionError(e)) {
            return new SQLTransientConnectionException(message, SQLState.CONNECTION_FAILURE.code, e);
        }
        return new SQLNonTransientConnectionException(message, SQLState.CONNECTION_FAILURE.code, e);
    }

    public SQLException errorClosingResource(IOException e) {
        return new SQLException(String.format("Error closing resources. %s", getMessage(e)), e);
    }

    public SQLInvalidAuthorizationSpecException errorAuth() {
        return new SQLInvalidAuthorizationSpecException("Authentication/authorization error",
                SQLState.INVALID_AUTH_SPEC.code);
    }

    public SQLException errorColumnNotFound(String columnNameOrNumber) {
        return new SQLException(String.format("Column %s was not found", columnNameOrNumber));
    }

    public SQLException errorUnexpectedColumnValue(ADBDatatype type, String columnName) {
        return new SQLException(
                String.format("Unexpected value of type %s for column %s", type.getTypeName(), columnName));
    }

    public SQLException errorUnwrapTypeMismatch(Class<?> iface) {
        return new SQLException(String.format("Cannot unwrap to %s", iface.getName()));
    }

    public SQLException errorInvalidStatementCategory() {
        return new SQLException("Invalid statement category");
    }

    public SQLException errorUnexpectedType(Class<?> type) {
        return new SQLException(String.format("Unexpected type %s", type.getName()), SQLState.INVALID_DATE_TYPE.code);
    }

    public SQLException errorUnexpectedType(byte typeTag) {
        return new SQLException(String.format("Unexpected type %s", typeTag), SQLState.INVALID_DATE_TYPE.code);
    }

    public SQLException errorUnexpectedType(ADBDatatype type) {
        return new SQLException(String.format("Unexpected type %s", type.getTypeName()),
                SQLState.INVALID_DATE_TYPE.code);
    }

    public SQLException errorInvalidValueOfType(ADBDatatype type) {
        return new SQLException(String.format("Invalid value of type %s", type), SQLState.INVALID_DATE_TYPE.code);
    }

    public SQLException errorNoResult() {
        return new SQLException("Result is unavailable");
    }

    public SQLException errorBadResultSignature() {
        return new SQLException("Cannot infer result columns");
    }

    public SQLException errorNoCurrentRow() {
        return new SQLException("No current row", SQLState.INVALID_CURSOR_POSITION.code);
    }

    public SQLException errorInRequestGeneration(IOException e) {
        return new SQLException(String.format("Cannot create request. %s", getMessage(e)), e);
    }

    public SQLException errorInRequestURIGeneration(URISyntaxException e) {
        return new SQLException(String.format("Cannot create request URI. %s", getMessage(e)), e);
    }

    public SQLException errorInResultHandling(IOException e) {
        // BUGFIX: message previously read "Cannot reading result"
        return new SQLException(String.format("Cannot read result. %s", getMessage(e)), e);
    }

    public SQLTimeoutException errorTimeout() {
        return new SQLTimeoutException();
    }

    public SQLTimeoutException errorTimeout(String message, IOException cause) {
        return new SQLTimeoutException(message, cause);
    }

    /** Hook: does this IOException represent a timeout? Default implementation: no. */
    protected boolean isTimeoutConnectionError(IOException e) {
        return false;
    }

    /** Hook: is this IOException worth retrying (transient)? Default implementation: no. */
    protected boolean isTransientConnectionError(IOException e) {
        return false;
    }

    /** Returns true when {@code e} is an instance of any class in {@code classList}. */
    protected boolean isInstanceOf(IOException e, List<Class<? extends IOException>> classList) {
        if (e != null) {
            for (Class<? extends IOException> c : classList) {
                if (c.isInstance(e)) {
                    return true;
                }
            }
        }
        return false;
    }

    /** Null-safe message extraction: returns "" when the exception or its message is null. */
    public String getMessage(Exception e) {
        String message = e != null ? e.getMessage() : null;
        return message != null ? message : "";
    }

    /** SQLSTATE codes used by the driver (values follow the SQL standard). */
    public enum SQLState {
        CONNECTION_FAILURE("08001"), // TODO:08006??
        CONNECTION_CLOSED("08003"),
        INVALID_AUTH_SPEC("28000"),
        // NOTE(review): HY004 means "invalid SQL data type"; the constant name looks
        // like a typo for INVALID_DATA_TYPE but is kept for source compatibility.
        INVALID_DATE_TYPE("HY004"),
        INVALID_CURSOR_POSITION("HY108");

        private final String code;

        SQLState(String code) {
            this.code = Objects.requireNonNull(code);
        }

        @Override
        public String toString() {
            return code;
        }
    }
}
| 6,046 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBProtocolBase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import java.util.function.Function;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.node.ArrayNode;
/**
 * Transport-independent base of the driver's query-service protocol.
 * Concrete subclasses implement the wire operations (connect, submit, fetch,
 * cancel); this class provides request parameter names, response parsing,
 * error/warning conversion, result-signature handling, and server version
 * string parsing.
 */
public abstract class ADBProtocolBase {

    // Query service request parameter names
    public static final String STATEMENT = "statement";
    public static final String ARGS = "args";
    public static final String MODE = "mode";
    public static final String READ_ONLY = "readonly";
    public static final String DATAVERSE = "dataverse";
    public static final String TIMEOUT = "timeout";
    public static final String SIGNATURE = "signature";
    public static final String COMPILE_ONLY = "compile-only";
    public static final String CLIENT_TYPE = "client-type";
    public static final String PLAN_FORMAT = "plan-format";
    public static final String MAX_WARNINGS = "max-warnings";
    public static final String SQL_COMPAT = "sql-compat";
    public static final String CLIENT_CONTEXT_ID = "client_context_id";

    // Well-known request parameter values
    public static final String MODE_DEFERRED = "deferred";
    public static final String CLIENT_TYPE_JDBC = "jdbc";
    public static final String RESULTS = "results";
    public static final String FORMAT_LOSSLESS_ADM = "lossless-adm";
    public static final String PLAN_FORMAT_STRING = "string";

    private static final String DEFAULT_DATAVERSE = "Default";
    private static final String DEFAULT_DATABASE = "Default";
    // Suffix on a signature type name marking the column as nullable/optional
    private static final String OPTIONAL_TYPE_SUFFIX = "?";
    // Synthetic column name used to expose EXPLAIN output as a result set
    private static final String EXPLAIN_ONLY_RESULT_COLUMN_NAME = "$1";

    // Parses server version strings of the form "<product>/<major>.<minor><rest>"
    private static final Pattern DATABASE_VERSION_PATTERN =
            Pattern.compile("(?<name>[^/]+)(?:/(?<ver>(?:(?<vermj>\\d+)(?:\\.(?<vermn>\\d+))?)?.*))?");

    protected final ADBDriverContext driverContext;
    protected final String user;
    // Upper bound on warnings requested from the server; clamped to >= 0
    protected final int maxWarnings;

    protected ADBProtocolBase(ADBDriverContext driverContext, Map<ADBDriverProperty, Object> params) {
        this.driverContext = Objects.requireNonNull(driverContext);
        this.user = (String) ADBDriverProperty.Common.USER.fetchPropertyValue(params);
        Number maxWarningsNum = (Number) ADBDriverProperty.Common.MAX_WARNINGS.fetchPropertyValue(params);
        this.maxWarnings = Math.max(maxWarningsNum.intValue(), 0);
    }

    /** Establishes the connection; returns the server version string. */
    public abstract String connect() throws SQLException;

    public abstract void close() throws SQLException;

    /** Lightweight liveness probe; must not throw. */
    public abstract boolean ping(int timeoutSeconds);

    public abstract QueryServiceResponse submitStatement(String sql, List<?> args, SubmitStatementOptions options)
            throws SQLException;

    public abstract JsonParser fetchResult(QueryServiceResponse response, SubmitStatementOptions options)
            throws SQLException;

    public abstract void cancelRunningStatement(UUID executionId) throws SQLException;

    public String getUser() {
        return user;
    }

    public ADBDriverContext getDriverContext() {
        return driverContext;
    }

    public ADBErrorReporter getErrorReporter() {
        return getDriverContext().getErrorReporter();
    }

    public Logger getLogger() {
        return getDriverContext().getLogger();
    }

    public SubmitStatementOptions createSubmitStatementOptions() {
        return new SubmitStatementOptions();
    }

    /** Returns 1 for UPDATE-category statements, 0 otherwise (actual counts unavailable). */
    public int getUpdateCount(QueryServiceResponse response) {
        // TODO:need to get update count through the response
        return isStatementCategory(response, QueryServiceResponse.StatementCategory.UPDATE) ? 1 : 0;
    }

    /**
     * Converts an EXPLAIN result (a single multi-line string in results[0]) into
     * an ArrayNode of objects {"$1": line}, applying {@code lineConverter} per line.
     */
    public ArrayNode fetchExplainOnlyResult(QueryServiceResponse response, Function<String, String> lineConverter)
            throws SQLException {
        if (response.results == null || response.results.isEmpty()) {
            throw getErrorReporter().errorInProtocol();
        }
        Object v = response.results.get(0);
        if (!(v instanceof String)) {
            throw getErrorReporter().errorInProtocol();
        }
        try (BufferedReader br = new BufferedReader(new StringReader(v.toString()))) {
            ArrayNode arrayNode = (ArrayNode) getDriverContext().getGenericObjectReader().createArrayNode();
            String line;
            while ((line = br.readLine()) != null) {
                arrayNode.addObject().put(EXPLAIN_ONLY_RESULT_COLUMN_NAME, lineConverter.apply(line));
            }
            return arrayNode;
        } catch (IOException e) {
            throw getErrorReporter().errorInResultHandling(e);
        }
    }

    public boolean isStatementCategory(QueryServiceResponse response, QueryServiceResponse.StatementCategory category) {
        return response.plans != null && category.equals(response.plans.statementCategory);
    }

    /** Returns an SQLException for the first server-reported error, or null if none. */
    public SQLException getErrorIfExists(QueryServiceResponse response) {
        if (response.errors != null && !response.errors.isEmpty()) {
            QueryServiceResponse.Message err = response.errors.get(0);
            return new SQLException(err.msg, null, err.code);
        }
        return null;
    }

    public List<QueryServiceResponse.Message> getWarningIfExists(QueryServiceResponse response) {
        return response.warnings != null && !response.warnings.isEmpty() ? response.warnings : null;
    }

    /**
     * Builds an SQLWarning chain preserving the original warning order:
     * iterating backwards so each new head links to the previously built tail.
     */
    public SQLWarning createSQLWarning(List<QueryServiceResponse.Message> warnings) {
        SQLWarning sqlWarning = null;
        ListIterator<QueryServiceResponse.Message> i = warnings.listIterator(warnings.size());
        while (i.hasPrevious()) {
            QueryServiceResponse.Message w = i.previous();
            SQLWarning sw = new SQLWarning(w.msg, null, w.code);
            if (sqlWarning != null) {
                sw.setNextWarning(sqlWarning);
            }
            sqlWarning = sw;
        }
        return sqlWarning;
    }

    /**
     * Derives the result columns from the response signature. EXPLAIN-only
     * responses get a single synthetic string column. A trailing '?' on a
     * signature type name marks the column optional (nullable).
     *
     * @throws SQLException on a missing/empty signature or an unknown type name
     */
    public List<ADBColumn> getColumns(QueryServiceResponse response) throws SQLException {
        if (isExplainOnly(response)) {
            return Collections.singletonList(new ADBColumn(EXPLAIN_ONLY_RESULT_COLUMN_NAME, ADBDatatype.STRING, false));
        }
        QueryServiceResponse.Signature signature = response.signature;
        if (signature == null) {
            throw getErrorReporter().errorInProtocol();
        }
        List<String> nameList = signature.name;
        List<String> typeList = signature.type;
        if (nameList == null || nameList.isEmpty() || typeList == null || typeList.isEmpty()) {
            throw getErrorReporter().errorBadResultSignature();
        }
        int count = nameList.size();
        List<ADBColumn> result = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            String columnName = nameList.get(i);
            String typeName = typeList.get(i);
            boolean optional = false;
            if (typeName.endsWith(OPTIONAL_TYPE_SUFFIX)) {
                optional = true;
                typeName = typeName.substring(0, typeName.length() - OPTIONAL_TYPE_SUFFIX.length());
            }
            ADBDatatype columnType = ADBDatatype.findByTypeName(typeName);
            if (columnType == null) {
                throw getErrorReporter().errorBadResultSignature();
            }
            result.add(new ADBColumn(columnName, columnType, optional));
        }
        return result;
    }

    public boolean isExplainOnly(QueryServiceResponse response) {
        return response.plans != null && Boolean.TRUE.equals(response.plans.explainOnly);
    }

    /**
     * Returns the number of positional statement parameters, i.e. the maximum
     * parameter position reported by the server (0 when there are none).
     *
     * @throws SQLException if a parameter is not numeric (named parameters unsupported)
     */
    public int getStatementParameterCount(QueryServiceResponse response) throws SQLException {
        QueryServiceResponse.Plans plans = response.plans;
        if (plans == null) {
            throw getErrorReporter().errorInProtocol();
        }
        if (plans.statementParameters == null) {
            return 0;
        }
        int paramPos = 0;
        for (Object param : plans.statementParameters) {
            if (param instanceof Number) {
                paramPos = Math.max(paramPos, ((Number) param).intValue());
            } else {
                throw getErrorReporter().errorParameterNotSupported(String.valueOf(param));
            }
        }
        return paramPos;
    }

    /**
     * Parses a "<product>/<version>" server version string. Missing or
     * non-matching components yield null name/version and 0 major/minor;
     * numeric overflow in a component is silently treated as 0.
     */
    public ADBProductVersion parseDatabaseVersion(String serverVersion) {
        String dbProductName = null;
        String dbProductVersion = null;
        int dbMajorVersion = 0;
        int dbMinorVersion = 0;
        if (serverVersion != null) {
            Matcher m = DATABASE_VERSION_PATTERN.matcher(serverVersion);
            if (m.matches()) {
                dbProductName = m.group("name");
                dbProductVersion = m.group("ver");
                String vermj = m.group("vermj");
                String vermn = m.group("vermn");
                if (vermj != null) {
                    try {
                        dbMajorVersion = Integer.parseInt(vermj);
                    } catch (NumberFormatException e) {
                        // ignore (overflow)
                    }
                }
                if (vermn != null) {
                    try {
                        dbMinorVersion = Integer.parseInt(vermn);
                    } catch (NumberFormatException e) {
                        // ignore (overflow)
                    }
                }
            }
        }
        return new ADBProductVersion(dbProductName, dbProductVersion, dbMajorVersion, dbMinorVersion);
    }

    public String getDefaultDataverse() {
        return DEFAULT_DATAVERSE;
    }

    public String getDefaultDatabase() {
        return DEFAULT_DATABASE;
    }

    /** Per-submission options filled in by the statement layer. */
    public static class SubmitStatementOptions {
        public String dataverseName;
        public int timeoutSeconds;
        public boolean forceReadOnly;
        public boolean compileOnly;
        public boolean sqlCompatMode;
        public UUID executionId;
    }

    /** Jackson-mapped shape of a query service HTTP response. */
    public static class QueryServiceResponse {
        public Status status;
        public Plans plans;
        public Signature signature;
        public String handle;
        public List<?> results; // currently only used for EXPLAIN results
        public List<Message> errors;
        public List<Message> warnings;

        public enum Status {
            RUNNING,
            SUCCESS,
            TIMEOUT,
            FAILED,
            FATAL
        }

        public enum StatementCategory {
            QUERY,
            UPDATE,
            DDL,
            PROCEDURE
        }

        /** Result signature: parallel lists of column names and type names. */
        public static class Signature {
            List<String> name;
            List<String> type;
        }

        public static class Plans {
            StatementCategory statementCategory;
            List<Object> statementParameters;
            Boolean explainOnly;
        }

        /** A single server error or warning: numeric code plus message text. */
        public static class Message {
            int code;
            String msg;
        }
    }
}
| 6,047 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBDriverContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.util.Objects;
import java.util.logging.Logger;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.module.SimpleModule;
/**
 * Immutable per-driver state shared by all connections: the driver version,
 * the error reporter, the logger, and preconfigured Jackson readers/writers
 * for both generic JSON handling and ADM-format serialization.
 */
public class ADBDriverContext {

    private final ADBProductVersion driverVersion;
    private final ADBErrorReporter errorReporter;
    private final Logger logger;
    private final ObjectReader genericObjectReader;
    private final ObjectWriter genericObjectWriter;
    private final ObjectReader admFormatObjectReader;
    private final ObjectWriter admFormatObjectWriter;

    public ADBDriverContext(ADBProductVersion driverVersion, ADBErrorReporter errorReporter, Logger logger) {
        this.driverVersion = Objects.requireNonNull(driverVersion);
        this.errorReporter = Objects.requireNonNull(errorReporter);
        this.logger = Objects.requireNonNull(logger);
        // Each mapper is built once here; only its (thread-safe) reader/writer views are retained.
        ObjectMapper genericMapper = createGenericObjectMapper();
        this.genericObjectReader = genericMapper.reader();
        this.genericObjectWriter = genericMapper.writer();
        ObjectMapper admMapper = createADMFormatObjectMapper();
        this.admFormatObjectReader = admMapper.reader();
        this.admFormatObjectWriter = admMapper.writer();
    }

    /** Builds the mapper used for generic JSON (query service requests/responses). */
    protected ObjectMapper createGenericObjectMapper() {
        ObjectMapper mapper = new ObjectMapper();
        mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.NON_PRIVATE);
        // serialization
        mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
        // deserialization: be lenient towards unknown fields and enum values
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.configure(DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL, true);
        mapper.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS);
        return mapper;
    }

    /** Builds the mapper used for ADM-format value (de)serialization. */
    protected ObjectMapper createADMFormatObjectMapper() {
        ObjectMapper admMapper = new ObjectMapper();
        SimpleModule module = new SimpleModule(getClass().getName());
        ADBStatement.configureADMFormatSerialization(module);
        ADBRowStore.configureADMFormatDeserialization(admMapper, module);
        admMapper.registerModule(module);
        return admMapper;
    }

    public ADBErrorReporter getErrorReporter() {
        return errorReporter;
    }

    public Logger getLogger() {
        return logger;
    }

    public ObjectReader getGenericObjectReader() {
        return genericObjectReader;
    }

    public ObjectWriter getGenericObjectWriter() {
        return genericObjectWriter;
    }

    public ObjectReader getAdmFormatObjectReader() {
        return admFormatObjectReader;
    }

    public ObjectWriter getAdmFormatObjectWriter() {
        return admFormatObjectWriter;
    }

    public ADBProductVersion getDriverVersion() {
        return driverVersion;
    }
}
| 6,048 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBParameterMetaData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.sql.ParameterMetaData;
import java.util.Objects;
/**
 * ParameterMetaData implementation for prepared statements. Every statement
 * parameter is reported as an untyped ("any"), nullable, unsigned IN
 * parameter, so all per-parameter queries return the same generic answers
 * regardless of {@code parameterIndex}.
 */
public class ADBParameterMetaData extends ADBWrapperSupport implements ParameterMetaData {

    protected final ADBPreparedStatement statement;
    protected final int parameterCount;

    public ADBParameterMetaData(ADBPreparedStatement statement, int parameterCount) {
        this.statement = Objects.requireNonNull(statement);
        this.parameterCount = parameterCount;
    }

    /** Number of parameter markers in the prepared statement. */
    @Override
    public int getParameterCount() {
        return parameterCount;
    }

    /** Only IN parameters are supported. */
    @Override
    public int getParameterMode(int parameterIndex) {
        return parameterModeIn;
    }

    /** Parameters are untyped; reported as the generic ANY type's JDBC code. */
    @Override
    public int getParameterType(int parameterIndex) {
        Integer vendorTypeNumber = ADBDatatype.ANY.getJdbcType().getVendorTypeNumber();
        return vendorTypeNumber;
    }

    @Override
    public String getParameterTypeName(int parameterIndex) {
        return ADBDatatype.ANY.getTypeName();
    }

    @Override
    public String getParameterClassName(int parameterIndex) {
        return Object.class.getName();
    }

    /** Every parameter may be null. */
    @Override
    public int isNullable(int parameterIndex) {
        return parameterNullable;
    }

    @Override
    public boolean isSigned(int parameterIndex) {
        return false;
    }

    /** Precision is unknown/not applicable for untyped parameters. */
    @Override
    public int getPrecision(int parameterIndex) {
        return 0;
    }

    /** Scale is unknown/not applicable for untyped parameters. */
    @Override
    public int getScale(int parameterIndex) {
        return 0;
    }

    @Override
    protected ADBErrorReporter getErrorReporter() {
        return statement.getErrorReporter();
    }
}
| 6,049 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBDatabaseMetaData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.RowIdLifetime;
import java.sql.SQLException;
import java.util.Objects;
/**
 * JDBC {@link DatabaseMetaData} implementation for the AsterixDB driver.
 * <p>
 * Version information is captured from the driver context and the server at
 * construction time; all catalog/schema/table/column listings are delegated to
 * the owning {@link ADBMetaStatement}, which issues queries against the
 * {@code Metadata} dataverse. The many {@code supports*}/{@code get*} capability
 * methods below return fixed answers describing the SQL++ dialect; their values
 * are driver policy and must stay in sync with the query translation layer.
 */
public class ADBDatabaseMetaData extends ADBWrapperSupport implements DatabaseMetaData {

    /*
     * See org.apache.asterix.metadata.utils.MetadataConstants.METADATA_OBJECT_NAME_LENGTH_LIMIT_UTF8
     */
    private static final int METADATA_OBJECT_NAME_LENGTH_LIMIT_UTF8 = 251;

    // Statement used to run all metadata queries; also provides the connection.
    protected final ADBMetaStatement metaStatement;
    protected final ADBProductVersion driverVersion;
    protected final ADBProductVersion databaseVersion;

    /**
     * Creates metadata backed by the given statement.
     *
     * @param metaStatement statement used for all metadata queries (non-null)
     * @param databaseVersion server product/version info reported by the server
     */
    public ADBDatabaseMetaData(ADBMetaStatement metaStatement, ADBProductVersion databaseVersion) {
        this.metaStatement = Objects.requireNonNull(metaStatement);
        this.driverVersion = metaStatement.connection.protocol.getDriverContext().getDriverVersion();
        this.databaseVersion = databaseVersion;
    }

    // Driver name and version

    @Override
    public String getDriverName() {
        return driverVersion.getProductName();
    }

    @Override
    public String getDriverVersion() {
        return driverVersion.getProductVersion();
    }

    @Override
    public int getDriverMajorVersion() {
        return driverVersion.getMajorVersion();
    }

    @Override
    public int getDriverMinorVersion() {
        return driverVersion.getMinorVersion();
    }

    @Override
    public int getJDBCMajorVersion() {
        return ADBDriverBase.JDBC_MAJOR_VERSION;
    }

    @Override
    public int getJDBCMinorVersion() {
        return ADBDriverBase.JDBC_MINOR_VERSION;
    }

    // Database name and version

    @Override
    public String getDatabaseProductName() {
        return databaseVersion.getProductName();
    }

    @Override
    public String getDatabaseProductVersion() {
        return databaseVersion.getProductVersion();
    }

    @Override
    public int getDatabaseMajorVersion() {
        return databaseVersion.getMajorVersion();
    }

    @Override
    public int getDatabaseMinorVersion() {
        return databaseVersion.getMinorVersion();
    }

    // Database objects
    // Catalogs and schemas

    @Override
    public ADBResultSet getCatalogs() throws SQLException {
        return metaStatement.executeGetCatalogsQuery();
    }

    @Override
    public int getMaxCatalogNameLength() {
        return METADATA_OBJECT_NAME_LENGTH_LIMIT_UTF8;
    }

    @Override
    public ADBResultSet getSchemas() throws SQLException {
        return metaStatement.executeGetSchemasQuery();
    }

    @Override
    public ADBResultSet getSchemas(String catalog, String schemaPattern) throws SQLException {
        return metaStatement.executeGetSchemasQuery(catalog, schemaPattern);
    }

    @Override
    public int getMaxSchemaNameLength() {
        return METADATA_OBJECT_NAME_LENGTH_LIMIT_UTF8;
    }

    // Whether catalog (resp. schema) qualifiers may appear inside SQL statements.
    // All supportsCatalogsIn* / supportsSchemasIn* methods below delegate here, so
    // flipping one of these two flags changes every statement category at once.
    protected boolean supportsCatalogsInStatements() {
        return false;
    }

    protected boolean supportsSchemasInStatements() {
        return false;
    }

    // Tables

    @Override
    public ADBResultSet getTableTypes() throws SQLException {
        return metaStatement.executeGetTableTypesQuery();
    }

    @Override
    public ADBResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types)
            throws SQLException {
        return metaStatement.executeGetTablesQuery(catalog, schemaPattern, tableNamePattern, types);
    }

    @Override
    public ADBResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern)
            throws SQLException {
        // Table inheritance is not supported: always an empty result.
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public int getMaxTableNameLength() {
        return METADATA_OBJECT_NAME_LENGTH_LIMIT_UTF8;
    }

    @Override
    public int getMaxColumnsInTable() {
        return 0;
    }

    @Override
    public int getMaxRowSize() {
        return 0;
    }

    @Override
    public boolean doesMaxRowSizeIncludeBlobs() {
        return true;
    }

    // Columns

    @Override
    public ADBResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern,
            String columnNamePattern) throws SQLException {
        return metaStatement.executeGetColumnsQuery(catalog, schemaPattern, tableNamePattern, columnNamePattern);
    }

    @Override
    public ADBResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern,
            String columnNamePattern) throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public ADBResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public ADBResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable)
            throws SQLException {
        // TODO:primary keys?
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public int getMaxColumnNameLength() {
        return 0;
    }

    // Keys

    @Override
    public ADBResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException {
        return metaStatement.executeGetPrimaryKeysQuery(catalog, schema, table);
    }

    @Override
    public ADBResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException {
        return metaStatement.executeGetImportedKeysQuery(catalog, schema, table);
    }

    @Override
    public ADBResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException {
        return metaStatement.executeGetExportedKeysQuery(catalog, schema, table);
    }

    @Override
    public ADBResultSet getCrossReference(String parentCatalog, String parentSchema, String parentTable,
            String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException {
        return metaStatement.executeCrossReferenceQuery(parentCatalog, parentSchema, parentTable, foreignCatalog,
                foreignSchema, foreignTable);
    }

    // Indexes

    @Override
    public ADBResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate)
            throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public int getMaxColumnsInIndex() {
        return 0;
    }

    @Override
    public int getMaxIndexLength() {
        return 0;
    }

    // Datatypes

    @Override
    public ADBResultSet getTypeInfo() throws SQLException {
        return metaStatement.executeGetTypeInfoQuery();
    }

    @Override
    public ADBResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern)
            throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public ADBResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types)
            throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public ADBResultSet getAttributes(String catalog, String schemaPattern, String typeNamePattern,
            String attributeNamePattern) throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public RowIdLifetime getRowIdLifetime() {
        return RowIdLifetime.ROWID_UNSUPPORTED;
    }

    @Override
    public long getMaxLogicalLobSize() {
        return 0;
    }

    @Override
    public boolean supportsRefCursors() {
        return false;
    }

    // User-defined functions and procedures

    @Override
    public boolean supportsStoredProcedures() {
        return false;
    }

    @Override
    public ADBResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern)
            throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public ADBResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern,
            String columnNamePattern) throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public ADBResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern)
            throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public ADBResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern,
            String columnNamePattern) throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public int getMaxProcedureNameLength() {
        return METADATA_OBJECT_NAME_LENGTH_LIMIT_UTF8;
    }

    // Security

    @Override
    public ADBResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern)
            throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    @Override
    public ADBResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern)
            throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    // Other database objects

    @Override
    public ADBResultSet getClientInfoProperties() throws SQLException {
        return metaStatement.executeEmptyResultQuery();
    }

    // SQL dialect: general

    @Override
    public boolean supportsMinimumSQLGrammar() {
        return true;
    }

    @Override
    public boolean supportsCoreSQLGrammar() {
        return true;
    }

    @Override
    public boolean supportsExtendedSQLGrammar() {
        return false;
    }

    @Override
    public boolean supportsANSI92EntryLevelSQL() {
        return true;
    }

    @Override
    public boolean supportsANSI92IntermediateSQL() {
        return false;
    }

    @Override
    public boolean supportsANSI92FullSQL() {
        return false;
    }

    @Override
    public String getSQLKeywords() {
        // keywords that are not also SQL:2003 keywords
        return "adapter,apply,asc,autogenerated,btree,closed,compaction,compact,correlate,collection,dataset,"
                + "dataverse,definition,desc,disconnect,div,explain,enforced,every,feed,flatten,fulltext,hints,if,"
                + "index,ingestion,internal,keyword,key,known,letting,let,limit,load,missing,mod,nodegroup,ngram,"
                + "offset,path,policy,pre-sorted,raw,refresh,returning,rtree,run,satisfies,secondary,some,stop,"
                + "synonym,temporary,type,upsert,use,view,write";
    }

    @Override
    public String getCatalogTerm() {
        return "catalog";
    }

    @Override
    public String getSchemaTerm() {
        return "schema";
    }

    @Override
    public String getProcedureTerm() {
        return "procedure";
    }

    @Override
    public int getMaxStatementLength() {
        return 0;
    }

    // SQL dialect: identifiers

    @Override
    public String getExtraNameCharacters() {
        return "";
    }

    @Override
    public String getIdentifierQuoteString() {
        // SQL++ delimits identifiers with backticks.
        return "`";
    }

    @Override
    public boolean supportsMixedCaseIdentifiers() {
        return true;
    }

    @Override
    public boolean storesMixedCaseIdentifiers() {
        return false;
    }

    @Override
    public boolean supportsMixedCaseQuotedIdentifiers() {
        return true;
    }

    @Override
    public boolean storesMixedCaseQuotedIdentifiers() {
        return false;
    }

    @Override
    public boolean storesLowerCaseIdentifiers() {
        return false;
    }

    @Override
    public boolean storesLowerCaseQuotedIdentifiers() {
        return false;
    }

    @Override
    public boolean storesUpperCaseIdentifiers() {
        return false;
    }

    @Override
    public boolean storesUpperCaseQuotedIdentifiers() {
        return false;
    }

    // SQL dialect: literals and parameters

    @Override
    public int getMaxBinaryLiteralLength() {
        return 0;
    }

    @Override
    public int getMaxCharLiteralLength() {
        return 0;
    }

    @Override
    public boolean supportsNamedParameters() {
        // Procedures (CallableStatement) are not supported
        return false;
    }

    // SQL dialect: functions and operators

    @Override
    public String getNumericFunctions() {
        // NOTE: JDBC escape clause is not yet supported
        // "add,div,mod,mult,neg,sub,abs,acos,asin,atan,atan2,ceil,cos,deg,degrees,e,exp,ln,log,floor,inf,nan,neginf,pi,posinf,power,rad,radians,random,round,sign,sin,sqrt,tan,trunc";
        return "";
    }

    @Override
    public String getStringFunctions() {
        // NOTE: JDBC escape clause is not yet supported
        // "contains,initcap,length,lower,ltrim,position,pos,regex_contains,regex_like,regex_position,regex_pos,regex_replace,repeat,replace,rtrim,split,substr,title,trim,upper";
        return "";
    }

    @Override
    public String getSystemFunctions() {
        // NOTE: JDBC escape clause is not yet supported
        return "";
    }

    @Override
    public String getTimeDateFunctions() {
        return "current_date,current_time,current_datetime";
    }

    @Override
    public boolean supportsConvert() {
        return false;
    }

    @Override
    public boolean supportsConvert(int fromType, int toType) {
        return false;
    }

    @Override
    public String getSearchStringEscape() {
        return "\\";
    }

    @Override
    public boolean supportsLikeEscapeClause() {
        return false;
    }

    @Override
    public boolean nullPlusNonNullIsNull() {
        return false;
    }

    @Override
    public boolean supportsStoredFunctionsUsingCallSyntax() {
        return false;
    }

    // SQL dialect: SELECT clause

    @Override
    public int getMaxColumnsInSelect() {
        return 0;
    }

    @Override
    public boolean supportsColumnAliasing() {
        return true;
    }

    // SQL dialect: FROM clause

    @Override
    public boolean allTablesAreSelectable() {
        return true;
    }

    @Override
    public int getMaxTablesInSelect() {
        return 0;
    }

    @Override
    public boolean isCatalogAtStart() {
        return true;
    }

    @Override
    public String getCatalogSeparator() {
        return ".";
    }

    @Override
    public boolean supportsTableCorrelationNames() {
        return true;
    }

    @Override
    public boolean supportsDifferentTableCorrelationNames() {
        return true;
    }

    // SQL dialect: JOIN clause

    @Override
    public boolean supportsOuterJoins() {
        return true;
    }

    @Override
    public boolean supportsLimitedOuterJoins() {
        return true;
    }

    @Override
    public boolean supportsFullOuterJoins() {
        return false;
    }

    // SQL dialect: ORDER BY clause

    @Override
    public int getMaxColumnsInOrderBy() {
        return 0;
    }

    @Override
    public boolean supportsOrderByUnrelated() {
        return true;
    }

    @Override
    public boolean supportsExpressionsInOrderBy() {
        return true;
    }

    // NULLs sort low in the dialect: only nullsAreSortedLow() reports true.

    @Override
    public boolean nullsAreSortedHigh() {
        return false;
    }

    @Override
    public boolean nullsAreSortedLow() {
        return true;
    }

    @Override
    public boolean nullsAreSortedAtStart() {
        return false;
    }

    @Override
    public boolean nullsAreSortedAtEnd() {
        return false;
    }

    // SQL dialect: GROUP BY clause

    @Override
    public boolean supportsGroupBy() {
        return true;
    }

    @Override
    public boolean supportsGroupByUnrelated() {
        return true;
    }

    @Override
    public boolean supportsGroupByBeyondSelect() {
        return true;
    }

    @Override
    public int getMaxColumnsInGroupBy() {
        return 0;
    }

    // SQL dialect: Subquery

    @Override
    public boolean supportsSubqueriesInComparisons() {
        return true;
    }

    @Override
    public boolean supportsSubqueriesInExists() {
        return true;
    }

    @Override
    public boolean supportsSubqueriesInIns() {
        return true;
    }

    @Override
    public boolean supportsSubqueriesInQuantifieds() {
        return true;
    }

    @Override
    public boolean supportsCorrelatedSubqueries() {
        return true;
    }

    // SQL dialect: Set operations

    @Override
    public boolean supportsUnion() {
        // Only UNION ALL is available; UNION (with duplicate elimination) is not.
        return false;
    }

    @Override
    public boolean supportsUnionAll() {
        return true;
    }

    // SQL dialect: DML statements

    @Override
    public boolean supportsCatalogsInDataManipulation() {
        return supportsCatalogsInStatements();
    }

    @Override
    public boolean supportsSchemasInDataManipulation() {
        return supportsSchemasInStatements();
    }

    @Override
    public boolean supportsPositionedDelete() {
        return false;
    }

    @Override
    public boolean supportsPositionedUpdate() {
        return false;
    }

    @Override
    public boolean supportsSelectForUpdate() {
        return false;
    }

    // SQL dialect: DDL statements
    // DDL: CREATE DATASET

    @Override
    public boolean supportsCatalogsInTableDefinitions() {
        return supportsCatalogsInStatements();
    }

    @Override
    public boolean supportsSchemasInTableDefinitions() {
        return supportsSchemasInStatements();
    }

    @Override
    public boolean supportsNonNullableColumns() {
        return true;
    }

    @Override
    public boolean supportsAlterTableWithAddColumn() {
        return false;
    }

    @Override
    public boolean supportsAlterTableWithDropColumn() {
        return false;
    }

    // DDL: CREATE INDEX

    @Override
    public boolean supportsCatalogsInIndexDefinitions() {
        return supportsCatalogsInStatements();
    }

    @Override
    public boolean supportsSchemasInIndexDefinitions() {
        return supportsSchemasInStatements();
    }

    // DDL: GRANT / REVOKE (not supported)

    @Override
    public boolean supportsCatalogsInPrivilegeDefinitions() {
        return supportsCatalogsInStatements();
    }

    @Override
    public boolean supportsSchemasInPrivilegeDefinitions() {
        return supportsSchemasInStatements();
    }

    @Override
    public boolean supportsIntegrityEnhancementFacility() {
        return false;
    }

    // SQL dialect: User-defined functions and procedures

    @Override
    public boolean allProceduresAreCallable() {
        return false;
    }

    @Override
    public boolean supportsCatalogsInProcedureCalls() {
        return supportsCatalogsInStatements();
    }

    @Override
    public boolean supportsSchemasInProcedureCalls() {
        return supportsSchemasInStatements();
    }

    // Transactions

    @Override
    public boolean supportsTransactions() {
        return false;
    }

    @Override
    public boolean supportsMultipleTransactions() {
        return true;
    }

    @Override
    public int getDefaultTransactionIsolation() {
        return Connection.TRANSACTION_READ_COMMITTED;
    }

    @Override
    public boolean supportsTransactionIsolationLevel(int level) {
        // READ_COMMITTED is the only isolation level offered.
        return Connection.TRANSACTION_READ_COMMITTED == level;
    }

    @Override
    public boolean supportsDataDefinitionAndDataManipulationTransactions() {
        return false;
    }

    @Override
    public boolean supportsDataManipulationTransactionsOnly() {
        return true;
    }

    @Override
    public boolean supportsOpenStatementsAcrossCommit() {
        return true;
    }

    @Override
    public boolean supportsOpenCursorsAcrossCommit() {
        return false;
    }

    @Override
    public boolean supportsOpenStatementsAcrossRollback() {
        return true;
    }

    @Override
    public boolean supportsOpenCursorsAcrossRollback() {
        return false;
    }

    @Override
    public boolean dataDefinitionCausesTransactionCommit() {
        return false;
    }

    @Override
    public boolean dataDefinitionIgnoredInTransactions() {
        return false;
    }

    @Override
    public boolean supportsSavepoints() {
        return false;
    }

    @Override
    public boolean autoCommitFailureClosesAllResultSets() {
        return false;
    }

    // Connection

    @Override
    public Connection getConnection() throws SQLException {
        return metaStatement.connection;
    }

    @Override
    public int getMaxConnections() {
        return 0;
    }

    @Override
    public String getURL() {
        return metaStatement.connection.url;
    }

    @Override
    public String getUserName() {
        return metaStatement.connection.protocol.getUser();
    }

    @Override
    public int getMaxUserNameLength() {
        return 0;
    }

    // Statement

    @Override
    public int getMaxStatements() {
        return 0;
    }

    @Override
    public boolean supportsStatementPooling() {
        return false;
    }

    @Override
    public boolean supportsBatchUpdates() {
        return false;
    }

    @Override
    public boolean supportsGetGeneratedKeys() {
        return false;
    }

    @Override
    public boolean generatedKeyAlwaysReturned() {
        return false;
    }

    @Override
    public boolean supportsMultipleResultSets() {
        return false;
    }

    @Override
    public boolean supportsMultipleOpenResults() {
        return false;
    }

    @Override
    public boolean locatorsUpdateCopy() {
        return false;
    }

    // ResultSet

    @Override
    public boolean supportsResultSetType(int type) {
        // Only forward-only, read-only result sets are produced by this driver.
        return type == ResultSet.TYPE_FORWARD_ONLY;
    }

    @Override
    public boolean supportsResultSetConcurrency(int type, int concurrency) {
        return type == ResultSet.TYPE_FORWARD_ONLY && concurrency == ResultSet.CONCUR_READ_ONLY;
    }

    @Override
    public boolean supportsResultSetHoldability(int holdability) {
        return holdability == ADBResultSet.RESULT_SET_HOLDABILITY;
    }

    @Override
    public int getResultSetHoldability() {
        return ADBResultSet.RESULT_SET_HOLDABILITY;
    }

    @Override
    public boolean ownInsertsAreVisible(int type) {
        return false;
    }

    @Override
    public boolean othersInsertsAreVisible(int type) {
        return false;
    }

    @Override
    public boolean insertsAreDetected(int type) {
        return false;
    }

    @Override
    public boolean ownUpdatesAreVisible(int type) {
        return false;
    }

    @Override
    public boolean othersUpdatesAreVisible(int type) {
        return false;
    }

    @Override
    public boolean updatesAreDetected(int type) {
        return false;
    }

    @Override
    public boolean ownDeletesAreVisible(int type) {
        return false;
    }

    @Override
    public boolean othersDeletesAreVisible(int type) {
        return false;
    }

    @Override
    public boolean deletesAreDetected(int type) {
        return false;
    }

    @Override
    public int getMaxCursorNameLength() {
        return 0;
    }

    // Miscellaneous

    @Override
    public boolean isReadOnly() {
        return false;
    }

    @Override
    public boolean usesLocalFiles() {
        return false;
    }

    @Override
    public boolean usesLocalFilePerTable() {
        return false;
    }

    // Errors and warnings

    @Override
    public int getSQLStateType() {
        // SQLSTATE values follow the SQL standard convention.
        return sqlStateSQL;
    }

    @Override
    protected ADBErrorReporter getErrorReporter() {
        return metaStatement.getErrorReporter();
    }
}
| 6,050 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBWrapperSupport.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.sql.SQLException;
import java.sql.Wrapper;
/**
 * Common implementation of the JDBC {@link Wrapper} contract shared by the
 * driver's public objects (connection, statements, result sets, metadata).
 * A driver object "wraps" exactly the interfaces it implements, so both
 * methods reduce to an {@code instanceof}-style check on {@code this}.
 */
public abstract class ADBWrapperSupport implements Wrapper {

    @Override
    public final boolean isWrapperFor(Class<?> iface) {
        return iface.isInstance(this);
    }

    @Override
    public final <T> T unwrap(Class<T> iface) throws SQLException {
        if (iface.isInstance(this)) {
            return iface.cast(this);
        }
        // Requested interface is not implemented by this object: report via the
        // subclass-provided error reporter so the SQLException is uniform.
        throw getErrorReporter().errorUnwrapTypeMismatch(iface);
    }

    /** Supplies the reporter used to build driver {@link SQLException}s. */
    protected abstract ADBErrorReporter getErrorReporter();
}
| 6,051 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBResultSetMetaData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * JDBC {@link ResultSetMetaData} implementation backed by the list of
 * {@link ADBColumn}s describing a result set. Also provides package-private
 * name-to-index lookup used by the result set's {@code findColumn} path.
 */
public class ADBResultSetMetaData extends ADBWrapperSupport implements ResultSetMetaData {

    final ADBStatement statement;
    private final List<ADBColumn> columns;
    // Maps column name -> 0-based index of the FIRST column with that name.
    private final Map<String, Integer> indexByName;

    /**
     * @param statement owning statement (non-null), used for error reporting
     * @param columns result columns; {@code null} is treated as an empty list
     */
    public ADBResultSetMetaData(ADBStatement statement, List<ADBColumn> columns) {
        this.statement = Objects.requireNonNull(statement);
        this.columns = columns != null ? columns : Collections.emptyList();
        this.indexByName = createIndexByName(this.columns);
    }

    @Override
    public int getColumnCount() {
        return columns.size();
    }

    @Override
    public String getColumnName(int columnNumber) throws SQLException {
        return getColumnByNumber(columnNumber).getName();
    }

    @Override
    public String getColumnLabel(int columnNumber) throws SQLException {
        // No aliasing layer: the label is always the column name.
        return getColumnByNumber(columnNumber).getName();
    }

    @Override
    public int getColumnType(int columnNumber) throws SQLException {
        return getColumnByNumber(columnNumber).getType().getJdbcType().getVendorTypeNumber();
    }

    @Override
    public String getColumnTypeName(int columnNumber) throws SQLException {
        return getColumnByNumber(columnNumber).getType().getTypeName();
    }

    @Override
    public String getColumnClassName(int columnNumber) throws SQLException {
        return getColumnByNumber(columnNumber).getType().getJavaClass().getName();
    }

    @Override
    public int getColumnDisplaySize(int columnNumber) {
        // TODO:based on type
        return 1;
    }

    @Override
    public int getPrecision(int columnNumber) {
        // TODO:based on type
        return 0;
    }

    @Override
    public int getScale(int columnNumber) {
        return 0;
    }

    @Override
    public boolean isAutoIncrement(int columnNumber) {
        return false;
    }

    @Override
    public boolean isCaseSensitive(int columnNumber) {
        return false;
    }

    @Override
    public boolean isCurrency(int columnNumber) {
        return false;
    }

    @Override
    public int isNullable(int columnNumber) throws SQLException {
        // Optional columns report columnNullable; required ones columnNoNulls.
        return getColumnByNumber(columnNumber).isOptional() ? columnNullable : columnNoNulls;
    }

    @Override
    public boolean isSearchable(int columnNumber) {
        return true;
    }

    @Override
    public boolean isSigned(int columnNumber) {
        return false;
    }

    @Override
    public boolean isReadOnly(int columnNumber) {
        return false;
    }

    @Override
    public boolean isWritable(int columnNumber) {
        return false;
    }

    @Override
    public boolean isDefinitelyWritable(int columnNumber) {
        return false;
    }

    @Override
    public String getCatalogName(int columnNumber) {
        return "";
    }

    @Override
    public String getSchemaName(int columnNumber) {
        return "";
    }

    @Override
    public String getTableName(int columnNumber) {
        return "";
    }

    // Helpers

    /** Resolves a 1-based JDBC column number, validating its range. */
    private ADBColumn getColumnByNumber(int columnNumber) throws SQLException {
        return getColumnByIndex(toColumnIndex(columnNumber));
    }

    /** Converts a 1-based column number to a 0-based index or fails. */
    private int toColumnIndex(int columnNumber) throws SQLException {
        boolean ok = 0 < columnNumber && columnNumber <= columns.size();
        if (!ok) {
            throw getErrorReporter().errorParameterValueNotSupported("columnNumber");
        }
        return columnNumber - 1;
    }

    ADBColumn getColumnByIndex(int idx) {
        return columns.get(idx);
    }

    /** Returns the 0-based index of the first column with this name, or -1. */
    int findColumnIndexByName(String columnName) {
        Integer idx = indexByName.get(columnName);
        return idx != null ? idx : -1;
    }

    private static Map<String, Integer> createIndexByName(List<ADBColumn> columns) {
        int n = columns.size();
        switch (n) {
            case 0:
                return Collections.emptyMap();
            case 1:
                return Collections.singletonMap(columns.get(0).getName(), 0);
            default:
                Map<String, Integer> m = new HashMap<>();
                for (int i = 0; i < n; i++) {
                    // JDBC requires name-based lookup to resolve to the FIRST
                    // matching column when names are duplicated, so keep the
                    // earliest index instead of letting later columns overwrite it.
                    m.putIfAbsent(columns.get(i).getName(), i);
                }
                return m;
        }
    }

    @Override
    protected ADBErrorReporter getErrorReporter() {
        return statement.getErrorReporter();
    }
}
| 6,052 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBMetaStatement.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.sql.DatabaseMetaData;
import java.sql.JDBCType;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class ADBMetaStatement extends ADBStatement {
// Table-type labels reported through DatabaseMetaData.getTableTypes()/getTables().
public static final String SCHEMALESS = "SCHEMALESS";
public static final String TABLE = "TABLE";
public static final String VIEW = "VIEW";

// Suffixes used when synthesizing key names — presumably for primary/foreign
// key result rows built elsewhere in this class; confirm against those methods.
private static final String PK_NAME_SUFFIX = "_pk";
private static final String FK_NAME_SUFFIX = "_fk";

/** Creates a statement dedicated to running catalog/metadata queries. */
public ADBMetaStatement(ADBConnection connection) {
    super(connection);
}
/**
 * Prepends a prolog to a generated metadata query: a block comment naming the
 * JDBC metadata operation, used to identify these queries in server logs.
 * A {@code null} comment means no prolog is emitted.
 */
protected void populateQueryProlog(StringBuilder sql, String comment) {
    if (comment == null) {
        return;
    }
    sql.append("/* ").append(comment).append(" */\n");
    // Disabled tuning knob kept for reference:
    //sql.append("set `compiler.min.memory.allocation` 'false';\n");
}
/**
 * Runs the query backing {@link java.sql.DatabaseMetaData#getCatalogs()}.
 * <p>
 * The JDBC catalog is derived from {@code Metadata.Dataverse} rows according to
 * the connection's mapping mode: in {@code CATALOG} mode each dataverse (or
 * "database/dataverse" pair when the server supports the database entity) is a
 * catalog; in {@code CATALOG_SCHEMA} mode only the first component is the
 * catalog and results are de-duplicated with GROUP BY.
 */
protected ADBResultSet executeGetCatalogsQuery() throws SQLException {
    checkClosed();
    StringBuilder sql = new StringBuilder(256);
    populateQueryProlog(sql, "JDBC-GetCatalogs");
    sql.append("select TABLE_CAT ");
    sql.append("from Metadata.`Dataverse` ");
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("let TABLE_CAT = DatabaseName || '/' || DataverseName ");
            } else {
                sql.append("let TABLE_CAT = DataverseName ");
            }
            break;
        case CATALOG_SCHEMA:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("let TABLE_CAT = DatabaseName ");
            } else {
                // Multi-part dataverse names: catalog is the first component.
                sql.append("let name = decode_dataverse_name(DataverseName), ");
                sql.append("TABLE_CAT = name[0] ");
                sql.append("where (array_length(name) between 1 and 2) ");
            }
            // Several dataverses may share a catalog; collapse duplicates.
            sql.append("group by TABLE_CAT ");
            break;
        default:
            throw new IllegalStateException();
    }
    sql.append("order by TABLE_CAT");
    return executeQueryImpl(sql.toString(), null);
}
/**
 * Runs the query backing the no-argument
 * {@link java.sql.DatabaseMetaData#getSchemas()}: schemas are restricted to the
 * connection's current catalog (the canonical dataverse name in {@code CATALOG}
 * mode, the current catalog otherwise), with no schema-name filter.
 */
protected ADBResultSet executeGetSchemasQuery() throws SQLException {
    String catalog;
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            catalog = connection.getDataverseCanonicalName();
            break;
        case CATALOG_SCHEMA:
            catalog = connection.getCatalog();
            break;
        default:
            throw new IllegalStateException();
    }
    // "0" tags the generated query's prolog comment as the no-arg variant.
    return executeGetSchemasQuery(catalog, null, "0");
}
/**
 * Runs the query backing
 * {@link java.sql.DatabaseMetaData#getSchemas(String, String)}; "1" tags the
 * generated query's prolog comment as the filtered variant.
 */
protected ADBResultSet executeGetSchemasQuery(String catalog, String schemaPattern) throws SQLException {
    return executeGetSchemasQuery(catalog, schemaPattern, "1");
}
/**
 * Builds and runs the schema-listing query over {@code Metadata.Dataverse}.
 * <p>
 * In {@code CATALOG} mode each dataverse is a catalog and TABLE_SCHEM is null;
 * in {@code CATALOG_SCHEMA} mode the dataverse name is split into
 * catalog/schema components. Optional filters on catalog ($1) and schema
 * pattern ($2) are appended as {@code and} clauses.
 *
 * @param tag suffix identifying the calling variant in the query prolog comment
 */
protected ADBResultSet executeGetSchemasQuery(String catalog, String schemaPattern, String tag)
        throws SQLException {
    checkClosed();
    StringBuilder sql = new StringBuilder(512);
    populateQueryProlog(sql, "JDBC-GetSchemas-" + tag);
    sql.append("select TABLE_SCHEM, TABLE_CATALOG ");
    sql.append("from Metadata.`Dataverse` ");
    sql.append("let ");
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("TABLE_CATALOG = DatabaseName || '/' || DataverseName, ");
            } else {
                sql.append("TABLE_CATALOG = DataverseName, ");
            }
            sql.append("TABLE_SCHEM = null ");
            // "where true" anchors the optional "and (...)" filters below.
            sql.append("where true ");
            break;
        case CATALOG_SCHEMA:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("TABLE_CATALOG = DatabaseName, ");
                sql.append("TABLE_SCHEM = DataverseName ");
                // FIX: a WHERE clause was missing in this branch, so the
                // optional "and (...)" filters below produced malformed SQL++
                // (or were parsed into the last LET binding). Mirror the
                // CATALOG branch and anchor them with "where true".
                sql.append("where true ");
            } else {
                sql.append("name = decode_dataverse_name(DataverseName), ");
                sql.append("TABLE_CATALOG = name[0], ");
                sql.append("TABLE_SCHEM = case array_length(name) when 1 then null else name[1] end ");
                sql.append("where (array_length(name) between 1 and 2) ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    if (catalog != null) {
        sql.append("and (TABLE_CATALOG = $1) ");
    }
    if (schemaPattern != null) {
        sql.append("and (if_null(TABLE_SCHEM, '') like $2) ");
    }
    sql.append("order by TABLE_CATALOG, TABLE_SCHEM");
    return executeQueryImpl(sql.toString(), Arrays.asList(catalog, schemaPattern));
}
/**
 * Builds and runs the query backing
 * {@link java.sql.DatabaseMetaData#getTables}. Joins {@code Metadata.Dataset}
 * with {@code Metadata.Datatype} to classify each dataset/view as tabular
 * (has record fields) or schemaless, labels it with the corresponding table
 * type term, and applies the optional type ($1), catalog ($2), schema ($3) and
 * table-name ($4) filters.
 */
protected ADBResultSet executeGetTablesQuery(String catalog, String schemaPattern, String tableNamePattern,
        String[] types) throws SQLException {
    checkClosed();
    // Table-type labels; getDatasetTerm/getViewTerm are defined elsewhere in
    // this class (outside this view) — presumably mapping tabular/non-tabular
    // to TABLE/SCHEMALESS/VIEW terms.
    String datasetTermTabular = getDatasetTerm(true);
    String datasetTermNonTabular = getDatasetTerm(false);
    String viewTermTabular = getViewTerm(true);
    String viewTermNonTabular = getViewTerm(false);
    StringBuilder sql = new StringBuilder(1024);
    populateQueryProlog(sql, "JDBC-GetTables");
    sql.append("select TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, null REMARKS, null TYPE_CAT, ");
    sql.append("null TYPE_SCHEM, null TYPE_NAME, null SELF_REFERENCING_COL_NAME, null REF_GENERATION ");
    sql.append("from Metadata.`Dataset` ds join Metadata.`Datatype` dt ");
    sql.append("on ds.DatatypeDataverseName = dt.DataverseName and ds.DatatypeName = dt.DatatypeName ");
    if (connection.isDatabaseEntitySupported()) {
        sql.append("and ds.DatatypeDatabaseName = dt.DatabaseName ");
    }
    sql.append("let ");
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("TABLE_CAT = ds.DatabaseName || '/' || ds.DataverseName, ");
            } else {
                sql.append("TABLE_CAT = ds.DataverseName, ");
            }
            sql.append("TABLE_SCHEM = null, ");
            break;
        case CATALOG_SCHEMA:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("TABLE_CAT = ds.DatabaseName, ");
                sql.append("TABLE_SCHEM = ds.DataverseName, ");
            } else {
                sql.append("dvname = decode_dataverse_name(ds.DataverseName), ");
                sql.append("TABLE_CAT = dvname[0], ");
                sql.append("TABLE_SCHEM = case array_length(dvname) when 1 then null else dvname[1] end, ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    sql.append("TABLE_NAME = ds.DatasetName, ");
    sql.append("isDataset = (ds.DatasetType = 'INTERNAL' or ds.DatasetType = 'EXTERNAL'), ");
    sql.append("isView = ds.DatasetType = 'VIEW', ");
    // "Tabular" = the dataset's datatype declares at least one record field.
    sql.append("hasFields = array_length(dt.Derived.Record.Fields) > 0, ");
    sql.append("TABLE_TYPE = case ");
    sql.append("when isDataset then (case when hasFields then '").append(datasetTermTabular).append("' else '")
            .append(datasetTermNonTabular).append("' end) ");
    sql.append("when isView then (case when hasFields then '").append(viewTermTabular).append("' else '")
            .append(viewTermNonTabular).append("' end) ");
    sql.append("else null end ");
    sql.append("where ");
    sql.append("(TABLE_TYPE ").append(types != null ? "in $1" : "is not null").append(") ");
    if (catalog != null) {
        sql.append("and (TABLE_CAT = $2) ");
    }
    if (schemaPattern != null) {
        sql.append("and (if_null(TABLE_SCHEM, '') like $3) ");
    }
    if (tableNamePattern != null) {
        sql.append("and (TABLE_NAME like $4) ");
    }
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            break;
        case CATALOG_SCHEMA:
            if (!connection.isDatabaseEntitySupported()) {
                // Skip dataverse names that do not split into 1 or 2 components.
                sql.append("and (array_length(dvname) between 1 and 2) ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    if (!connection.catalogIncludesSchemaless) {
        // Connection configured to hide schemaless datasets/views.
        sql.append("and hasFields ");
    }
    sql.append("order by TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, TABLE_NAME");
    List<String> typesList = types != null ? Arrays.asList(types) : null;
    return executeQueryImpl(sql.toString(), Arrays.asList(typesList, catalog, schemaPattern, tableNamePattern));
}
/**
 * Builds and executes the metadata query backing {@link java.sql.DatabaseMetaData#getColumns}.
 * One row is emitted per field of each dataset's record datatype, in the standard JDBC
 * getColumns() column order.
 *
 * @param catalog           exact catalog filter, or {@code null} for no filter (bound as $1)
 * @param schemaPattern     LIKE pattern for the schema, or {@code null} (bound as $2)
 * @param tableNamePattern  LIKE pattern for the table name, or {@code null} (bound as $3)
 * @param columnNamePattern LIKE pattern for the column name, or {@code null} (bound as $4)
 * @return result set ordered by TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION
 * @throws SQLException if the statement is closed or query execution fails
 */
protected ADBResultSet executeGetColumnsQuery(String catalog, String schemaPattern, String tableNamePattern,
        String columnNamePattern) throws SQLException {
    checkClosed();
    StringBuilder sql = new StringBuilder(2048);
    populateQueryProlog(sql, "JDBC-GetColumns");
    // Projection: the fixed JDBC getColumns() result shape; unsupported attributes are
    // emitted as nulls / constants.
    sql.append("select TABLE_CAT, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, DATA_TYPE, TYPE_NAME, COLUMN_SIZE, ");
    sql.append("1 BUFFER_LENGTH, null DECIMAL_DIGITS, 2 NUM_PREC_RADIX, NULLABLE, ");
    sql.append("null REMARKS, null COLUMN_DEF, DATA_TYPE SQL_DATA_TYPE,");
    sql.append("0 SQL_DATETIME_SUB, COLUMN_SIZE CHAR_OCTET_LENGTH, ORDINAL_POSITION, ");
    sql.append("case NULLABLE when 0 then 'NO' else 'YES' end IS_NULLABLE, ");
    sql.append("null SCOPE_CATALOG, null SCOPE_SCHEMA, null SCOPE_TABLE, null SOURCE_DATA_TYPE, ");
    sql.append("'NO' IS_AUTOINCREMENT, 'NO' IS_GENERATEDCOLUMN ");
    sql.append("from Metadata.`Dataset` ds ");
    sql.append("join Metadata.`Datatype` dt ");
    sql.append("on ds.DatatypeDataverseName = dt.DataverseName and ds.DatatypeName = dt.DatatypeName ");
    if (connection.isDatabaseEntitySupported()) {
        // Newer servers scope datatypes by database as well.
        sql.append("and ds.DatatypeDatabaseName = dt.DatabaseName ");
    }
    // Explode the record type into one row per field; fieldpos drives ORDINAL_POSITION.
    sql.append("unnest dt.Derived.Record.Fields as field at fieldpos ");
    // dt2 resolves a field whose type is itself a derived (object/array/multiset) type.
    sql.append("left join Metadata.`Datatype` dt2 ");
    sql.append(
            "on field.FieldType = dt2.DatatypeName and ds.DataverseName = dt2.DataverseName and dt2.Derived is known ");
    if (connection.isDatabaseEntitySupported()) {
        sql.append("and ds.DatabaseName = dt2.DatabaseName ");
    }
    sql.append("let ");
    // Catalog/schema derivation depends on the driver's configured mapping mode.
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("TABLE_CAT = ds.DatabaseName || '/' || ds.DataverseName, ");
            } else {
                sql.append("TABLE_CAT = ds.DataverseName, ");
            }
            sql.append("TABLE_SCHEM = null, ");
            break;
        case CATALOG_SCHEMA:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("TABLE_CAT = ds.DatabaseName, ");
                sql.append("TABLE_SCHEM = ds.DataverseName, ");
            } else {
                // Split a multi-part dataverse name into catalog + optional schema.
                sql.append("dvname = decode_dataverse_name(ds.DataverseName), ");
                sql.append("TABLE_CAT = dvname[0], ");
                sql.append("TABLE_SCHEM = case array_length(dvname) when 1 then null else dvname[1] end, ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    sql.append("TABLE_NAME = ds.DatasetName, ");
    sql.append("COLUMN_NAME = field.FieldName, ");
    // TYPE_NAME: report the nested-type kind (object/array/multiset) when the field's
    // type is derived, otherwise the field's declared type name.
    sql.append("TYPE_NAME = case ");
    for (ADBDatatype nestedType : new ADBDatatype[] { ADBDatatype.OBJECT, ADBDatatype.ARRAY,
            ADBDatatype.MULTISET }) {
        sql.append(String.format("when dt2.Derived.%s is known then '%s' ",
                ADBDatatype.getDerivedRecordName(nestedType), nestedType.getTypeName()));
    }
    sql.append("else field.FieldType end, ");
    // DATA_TYPE: map TYPE_NAME to the corresponding java.sql.Types vendor code.
    sql.append("DATA_TYPE = ");
    sql.append("case TYPE_NAME ");
    for (ADBDatatype type : ADBDatatype.values()) {
        JDBCType jdbcType = type.getJdbcType();
        if (type.isNullOrMissing() || jdbcType.equals(JDBCType.OTHER)) {
            // will be handled by the 'else' clause
            continue;
        }
        sql.append("when '").append(type.getTypeName()).append("' ");
        sql.append("then ").append(jdbcType.getVendorTypeNumber()).append(" ");
    }
    sql.append("else ").append(JDBCType.OTHER.getVendorTypeNumber()).append(" end, ");
    sql.append("COLUMN_SIZE = case field.FieldType when 'string' then 32767 else 8 end, "); // TODO:based on type
    sql.append("ORDINAL_POSITION = fieldpos, ");
    sql.append("NULLABLE = case when field.IsNullable or field.IsMissable then 1 else 0 end ");
    // Only datasets with a schema (at least one record field) produce columns.
    sql.append("where (array_length(dt.Derived.Record.Fields) > 0) ");
    if (catalog != null) {
        sql.append("and (TABLE_CAT = $1) ");
    }
    if (schemaPattern != null) {
        sql.append("and (if_null(TABLE_SCHEM, '') like $2) ");
    }
    if (tableNamePattern != null) {
        sql.append("and (TABLE_NAME like $3) ");
    }
    if (columnNamePattern != null) {
        sql.append("and (COLUMN_NAME like $4) ");
    }
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            break;
        case CATALOG_SCHEMA:
            if (!connection.isDatabaseEntitySupported()) {
                // Dataverse names with more than two parts cannot be mapped to catalog/schema.
                sql.append("and (array_length(dvname) between 1 and 2) ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    sql.append("order by TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION");
    return executeQueryImpl(sql.toString(),
            Arrays.asList(catalog, schemaPattern, tableNamePattern, columnNamePattern));
}
/**
 * Builds and executes the metadata query backing {@link java.sql.DatabaseMetaData#getPrimaryKeys}.
 * Emits one row per primary-key component; datasets with compound key parts (multi-element key
 * paths) or non-zero key source indicators are excluded since they cannot be represented as
 * simple columns.
 *
 * @param catalog exact catalog filter, or {@code null} (bound as $1)
 * @param schema  LIKE pattern for the schema, or {@code null} (bound as $2)
 * @param table   LIKE pattern for the table name, or {@code null} (bound as $3)
 * @return result set ordered by COLUMN_NAME, per the JDBC getPrimaryKeys() contract
 * @throws SQLException if the statement is closed or query execution fails
 */
protected ADBResultSet executeGetPrimaryKeysQuery(String catalog, String schema, String table) throws SQLException {
    checkClosed();
    StringBuilder sql = new StringBuilder(1024);
    populateQueryProlog(sql, "JDBC-GetPrimaryKeys");
    sql.append("select TABLE_CAT, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, KEY_SEQ, PK_NAME ");
    sql.append("from Metadata.`Dataset` ds ");
    sql.append("join Metadata.`Datatype` dt ");
    sql.append("on ds.DatatypeDataverseName = dt.DataverseName and ds.DatatypeName = dt.DatatypeName ");
    if (connection.isDatabaseEntitySupported()) {
        sql.append("and ds.DatatypeDatabaseName = dt.DatabaseName ");
    }
    // The PrimaryKey list lives in whichever details record the dataset kind uses;
    // pkipos drives KEY_SEQ.
    sql.append("unnest coalesce(ds.InternalDetails, ds.ExternalDetails, ds.ViewDetails).PrimaryKey pki at pkipos ");
    sql.append("let ");
    sql.append("hasFields = array_length(dt.Derived.Record.Fields) > 0, ");
    // Catalog/schema derivation depends on the driver's configured mapping mode.
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("TABLE_CAT = ds.DatabaseName || '/' || ds.DataverseName, ");
            } else {
                sql.append("TABLE_CAT = ds.DataverseName, ");
            }
            sql.append("TABLE_SCHEM = null, ");
            break;
        case CATALOG_SCHEMA:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("TABLE_CAT = ds.DatabaseName, ");
                sql.append("TABLE_SCHEM = ds.DataverseName, ");
            } else {
                sql.append("dvname = decode_dataverse_name(ds.DataverseName), ");
                sql.append("TABLE_CAT = dvname[0], ");
                sql.append("TABLE_SCHEM = case array_length(dvname) when 1 then null else dvname[1] end, ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    sql.append("TABLE_NAME = ds.DatasetName, ");
    // pki is a key path (list of field names); only single-element paths are supported below.
    sql.append("COLUMN_NAME = pki[0], ");
    sql.append("KEY_SEQ = pkipos, ");
    // Synthesized constraint name: "<table>" + PK_NAME_SUFFIX.
    sql.append("PK_NAME = TABLE_NAME || '").append(PK_NAME_SUFFIX).append("', ");
    sql.append("dsDetails = coalesce(ds.InternalDetails, ds.ExternalDetails, ds.ViewDetails) ");
    sql.append("where (every pk in dsDetails.PrimaryKey satisfies array_length(pk) = 1 end) ");
    sql.append("and (every si in dsDetails.KeySourceIndicator satisfies si = 0 end ) ");
    if (catalog != null) {
        sql.append("and (TABLE_CAT = $1) ");
    }
    if (schema != null) {
        sql.append("and (if_null(TABLE_SCHEM, '') like $2) ");
    }
    if (table != null) {
        sql.append("and (TABLE_NAME like $3) ");
    }
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            break;
        case CATALOG_SCHEMA:
            if (!connection.isDatabaseEntitySupported()) {
                sql.append("and (array_length(dvname) between 1 and 2) ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    if (!connection.catalogIncludesSchemaless) {
        // Optionally hide schemaless datasets from metadata results.
        sql.append("and hasFields ");
    }
    sql.append("order by COLUMN_NAME");
    return executeQueryImpl(sql.toString(), Arrays.asList(catalog, schema, table));
}
/**
 * Backs {@link java.sql.DatabaseMetaData#getImportedKeys}: foreign keys declared IN the given
 * table (the table is the FK side), ordered by the PK-side table per the JDBC contract.
 */
protected ADBResultSet executeGetImportedKeysQuery(String catalog, String schema, String table)
        throws SQLException {
    return executeGetImportedExportedKeysQuery("JDBC-GetImportedKeys", null, null, null, catalog, schema, table,
            false);
}

/**
 * Backs {@link java.sql.DatabaseMetaData#getExportedKeys}: foreign keys that REFERENCE the given
 * table (the table is the PK side), ordered by the FK-side table per the JDBC contract.
 */
protected ADBResultSet executeGetExportedKeysQuery(String catalog, String schema, String table)
        throws SQLException {
    return executeGetImportedExportedKeysQuery("JDBC-GetExportedKeys", catalog, schema, table, null, null, null,
            true);
}

/**
 * Backs {@link java.sql.DatabaseMetaData#getCrossReference}: foreign keys from the foreign table
 * that reference the parent table, ordered by the FK-side table.
 */
protected ADBResultSet executeCrossReferenceQuery(String parentCatalog, String parentSchema, String parentTable,
        String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException {
    return executeGetImportedExportedKeysQuery("JDBC-CrossReference", parentCatalog, parentSchema, parentTable,
            foreignCatalog, foreignSchema, foreignTable, true);
}
/**
 * Shared implementation behind getImportedKeys(), getExportedKeys() and getCrossReference().
 * Joins each dataset's ForeignKeys metadata with the referenced (primary-key side) dataset and
 * emits one row per foreign-key column in the standard JDBC result shape. Foreign keys with
 * compound key parts or non-zero key source indicators are excluded.
 *
 * @param comment   label injected into the query prolog for server-side identification
 * @param pkCatalog exact catalog filter for the PK side, or {@code null} (bound as $1)
 * @param pkSchema  LIKE pattern for the PK-side schema, or {@code null} (bound as $2)
 * @param pkTable   LIKE pattern for the PK-side table, or {@code null} (bound as $3)
 * @param fkCatalog exact catalog filter for the FK side, or {@code null} (bound as $4)
 * @param fkSchema  LIKE pattern for the FK-side schema, or {@code null} (bound as $5)
 * @param fkTable   LIKE pattern for the FK-side table, or {@code null} (bound as $6)
 * @param orderByFk {@code true} to order by the FK-side table (exported keys / cross reference),
 *                  {@code false} to order by the PK-side table (imported keys)
 * @return result set in the DatabaseMetaData.getImportedKeys()/getExportedKeys() column order
 * @throws SQLException if the statement is closed or query execution fails
 */
protected ADBResultSet executeGetImportedExportedKeysQuery(String comment, String pkCatalog, String pkSchema,
        String pkTable, String fkCatalog, String fkSchema, String fkTable, boolean orderByFk) throws SQLException {
    // Fix: every sibling executeGet*Query() metadata method starts with checkClosed();
    // this one previously omitted it, allowing execution on a closed statement.
    checkClosed();
    StringBuilder sql = new StringBuilder(2048);
    populateQueryProlog(sql, comment);
    sql.append("select PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, PKCOLUMN_NAME, ");
    sql.append("FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, FKCOLUMN_NAME, KEY_SEQ, ");
    // Update/delete rules and deferrability are fixed constants: presumably the server
    // does not enforce referential actions -- TODO confirm.
    sql.append(DatabaseMetaData.importedKeyNoAction).append(" UPDATE_RULE, ");
    sql.append(DatabaseMetaData.importedKeyNoAction).append(" DELETE_RULE, ");
    sql.append("FK_NAME, PK_NAME, ");
    sql.append(DatabaseMetaData.importedKeyInitiallyDeferred).append(" DEFERRABILITY ");
    sql.append("from Metadata.`Dataset` ds ");
    sql.append("join Metadata.`Datatype` dt ");
    sql.append("on ds.DatatypeDataverseName = dt.DataverseName and ds.DatatypeName = dt.DatatypeName ");
    if (connection.isDatabaseEntitySupported()) {
        sql.append("and ds.DatatypeDatabaseName = dt.DatabaseName ");
    }
    // ds = FK-side dataset; explode its declared foreign keys (fkpos numbers them).
    sql.append("unnest coalesce(ds.InternalDetails, ds.ExternalDetails, ds.ViewDetails).ForeignKeys fk at fkpos ");
    // ds2 = the referenced, PK-side dataset.
    sql.append("join Metadata.`Dataset` ds2 ");
    sql.append("on fk.RefDataverseName = ds2.DataverseName and fk.RefDatasetName = ds2.DatasetName ");
    if (connection.isDatabaseEntitySupported()) {
        sql.append("and fk.RefDatabaseName = ds2.DatabaseName ");
    }
    // Explode the FK's columns; fkipos drives KEY_SEQ.
    sql.append("unnest fk.ForeignKey fki at fkipos ");
    sql.append("let ");
    sql.append("hasFields = array_length(dt.Derived.Record.Fields) > 0, ");
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("FKTABLE_CAT = ds.DatabaseName || '/' || ds.DataverseName, ");
                sql.append("PKTABLE_CAT = ds2.DatabaseName || '/' || ds2.DataverseName, ");
            } else {
                sql.append("FKTABLE_CAT = ds.DataverseName, ");
                sql.append("PKTABLE_CAT = ds2.DataverseName, ");
            }
            sql.append("FKTABLE_SCHEM = null, ");
            sql.append("PKTABLE_SCHEM = null, ");
            break;
        case CATALOG_SCHEMA:
            if (connection.isDatabaseEntitySupported()) {
                sql.append("FKTABLE_CAT = ds.DatabaseName, ");
                sql.append("FKTABLE_SCHEM = ds.DataverseName, ");
                sql.append("PKTABLE_CAT = ds2.DatabaseName, ");
                sql.append("PKTABLE_SCHEM = ds2.DataverseName, ");
            } else {
                sql.append("dvname = decode_dataverse_name(ds.DataverseName), ");
                sql.append("FKTABLE_CAT = dvname[0], ");
                sql.append("FKTABLE_SCHEM = case array_length(dvname) when 1 then null else dvname[1] end, ");
                sql.append("dvname2 = decode_dataverse_name(ds2.DataverseName), ");
                sql.append("PKTABLE_CAT = dvname2[0], ");
                sql.append("PKTABLE_SCHEM = case array_length(dvname2) when 1 then null else dvname2[1] end, ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    sql.append("ds2Details = coalesce(ds2.InternalDetails, ds2.ExternalDetails, ds2.ViewDetails), ");
    sql.append("FKTABLE_NAME = ds.DatasetName, ");
    sql.append("PKTABLE_NAME = ds2.DatasetName, ");
    sql.append("FKCOLUMN_NAME = fki[0], ");
    // FK columns are positionally aligned with the referenced table's primary key.
    sql.append("PKCOLUMN_NAME = ds2Details.PrimaryKey[fkipos-1][0], ");
    sql.append("KEY_SEQ = fkipos, ");
    // Synthesized constraint names from the table names plus fixed suffixes.
    sql.append("PK_NAME = PKTABLE_NAME || '").append(PK_NAME_SUFFIX).append("', ");
    sql.append("FK_NAME = FKTABLE_NAME || '").append(FK_NAME_SUFFIX).append("_' || string(fkpos) ");
    sql.append("where (every fki2 in fk.ForeignKey satisfies array_length(fki2) = 1 end) ");
    sql.append("and (every fksi in fk.KeySourceIndicator satisfies fksi = 0 end ) ");
    sql.append("and (every pki in ds2Details.PrimaryKey satisfies array_length(pki) = 1 end) ");
    sql.append("and (every pksi in ds2Details.KeySourceIndicator satisfies pksi = 0 end) ");
    if (pkCatalog != null) {
        sql.append("and (").append("PKTABLE_CAT").append(" = $1) ");
    }
    if (pkSchema != null) {
        sql.append("and (if_null(").append("PKTABLE_SCHEM").append(", '') like $2) ");
    }
    if (pkTable != null) {
        sql.append("and (").append("PKTABLE_NAME").append(" like $3) ");
    }
    if (fkCatalog != null) {
        sql.append("and (").append("FKTABLE_CAT").append(" = $4) ");
    }
    if (fkSchema != null) {
        sql.append("and (if_null(").append("FKTABLE_SCHEM").append(", '') like $5) ");
    }
    if (fkTable != null) {
        sql.append("and (").append("FKTABLE_NAME").append(" like $6) ");
    }
    switch (connection.catalogDataverseMode) {
        case CATALOG:
            break;
        case CATALOG_SCHEMA:
            if (!connection.isDatabaseEntitySupported()) {
                sql.append("and (array_length(dvname) between 1 and 2) ");
                sql.append("and (array_length(dvname2) between 1 and 2) ");
            }
            break;
        default:
            throw new IllegalStateException();
    }
    if (!connection.catalogIncludesSchemaless) {
        sql.append("and hasFields ");
    }
    sql.append("order by ").append(
            orderByFk ? "FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME" : "PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME")
            .append(", KEY_SEQ");
    return executeQueryImpl(sql.toString(),
            Arrays.asList(pkCatalog, pkSchema, pkTable, fkCatalog, fkSchema, fkTable));
}
/**
 * Backs {@link java.sql.DatabaseMetaData#getTableTypes}. Produces the result locally (no server
 * round trip) from the driver's dataset/view terms; the LinkedHashSet preserves emission order
 * while de-duplicating.
 *
 * @return a single-column (TABLE_TYPE) system result set
 * @throws SQLException if the statement is closed
 */
protected ADBResultSet executeGetTableTypesQuery() throws SQLException {
    checkClosed();
    LinkedHashSet<String> tableTypes = new LinkedHashSet<>();
    tableTypes.add(getDatasetTerm(true));
    tableTypes.add(getViewTerm(true));
    if (connection.catalogIncludesSchemaless) {
        // Schemaless variants are only advertised when the connection exposes them.
        tableTypes.add(getDatasetTerm(false));
        tableTypes.add(getViewTerm(false));
    }
    List<ADBColumn> columns = Collections.singletonList(new ADBColumn("TABLE_TYPE", ADBDatatype.STRING, false));
    AbstractValueSerializer stringSer = getADMFormatSerializer(String.class);
    ArrayNode result =
            (ArrayNode) connection.protocol.getDriverContext().getGenericObjectReader().createArrayNode();
    for (String tableType : tableTypes) {
        result.addObject().put("TABLE_TYPE", stringSer.serializeToString(tableType));
    }
    return createSystemResultSet(columns, result);
}
/**
 * Backs {@link java.sql.DatabaseMetaData#getTypeInfo}. Builds the full type-info result locally
 * (no server round trip), one row per supported datatype, via {@link #populateTypeInfo}.
 *
 * @return a system result set in the standard JDBC getTypeInfo() column order
 * @throws SQLException if the statement is closed
 */
protected ADBResultSet executeGetTypeInfoQuery() throws SQLException {
    checkClosed();
    AbstractValueSerializer int16Ser = getADMFormatSerializer(Short.class);
    AbstractValueSerializer int32Ser = getADMFormatSerializer(Integer.class);
    AbstractValueSerializer stringSer = getADMFormatSerializer(String.class);
    List<ADBColumn> columns = new ArrayList<>();
    columns.add(new ADBColumn("TYPE_NAME", ADBDatatype.STRING, false));
    columns.add(new ADBColumn("DATA_TYPE", ADBDatatype.INTEGER, false));
    columns.add(new ADBColumn("PRECISION", ADBDatatype.INTEGER, true));
    columns.add(new ADBColumn("LITERAL_PREFIX", ADBDatatype.STRING, true));
    columns.add(new ADBColumn("LITERAL_SUFFIX", ADBDatatype.STRING, true));
    columns.add(new ADBColumn("CREATE_PARAMS", ADBDatatype.STRING, true));
    columns.add(new ADBColumn("NULLABLE", ADBDatatype.SMALLINT, true));
    columns.add(new ADBColumn("CASE_SENSITIVE", ADBDatatype.BOOLEAN, true));
    columns.add(new ADBColumn("SEARCHABLE", ADBDatatype.SMALLINT, true));
    columns.add(new ADBColumn("UNSIGNED_ATTRIBUTE", ADBDatatype.BOOLEAN, true));
    columns.add(new ADBColumn("FIXED_PREC_SCALE", ADBDatatype.BOOLEAN, true));
    columns.add(new ADBColumn("AUTO_INCREMENT", ADBDatatype.BOOLEAN, true));
    columns.add(new ADBColumn("LOCAL_TYPE_NAME", ADBDatatype.STRING, true));
    columns.add(new ADBColumn("MINIMUM_SCALE", ADBDatatype.SMALLINT, true));
    columns.add(new ADBColumn("MAXIMUM_SCALE", ADBDatatype.SMALLINT, true));
    columns.add(new ADBColumn("SQL_DATA_TYPE", ADBDatatype.INTEGER, true));
    columns.add(new ADBColumn("SQL_DATETIME_SUB", ADBDatatype.INTEGER, true));
    columns.add(new ADBColumn("NUM_PREC_RADIX", ADBDatatype.INTEGER, true));
    ArrayNode result =
            (ArrayNode) connection.protocol.getDriverContext().getGenericObjectReader().createArrayNode();
    // One row per type: (type, precision, radix, minScale, maxScale, searchable, prefix, suffix).
    populateTypeInfo(result.addObject(), ADBDatatype.BOOLEAN, 1, null, null, null, null, null, null, int16Ser,
            int32Ser, stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.TINYINT, 3, 10, 0, 0, false, null, null, int16Ser, int32Ser,
            stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.SMALLINT, 5, 10, 0, 0, false, null, null, int16Ser, int32Ser,
            stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.INTEGER, 10, 10, 0, 0, false, null, null, int16Ser, int32Ser,
            stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.BIGINT, 19, 10, 0, 0, false, null, null, int16Ser, int32Ser,
            stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.FLOAT, 7, 2, 0, 0, false, null, null, int16Ser, int32Ser,
            stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.DOUBLE, 15, 2, 0, 0, false, null, null, int16Ser, int32Ser,
            stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.DATE, 32, null, 0, 0, false, null, null, int16Ser, int32Ser,
            stringSer); // TODO:precision
    populateTypeInfo(result.addObject(), ADBDatatype.TIME, 32, null, 0, 0, false, null, null, int16Ser, int32Ser,
            stringSer); // TODO:precision
    populateTypeInfo(result.addObject(), ADBDatatype.DATETIME, 32, null, 0, 0, false, null, null, int16Ser,
            int32Ser, stringSer); // TODO:precision
    populateTypeInfo(result.addObject(), ADBDatatype.YEARMONTHDURATION, 32, null, 0, 0, false, null, null, int16Ser,
            int32Ser, stringSer); // TODO:precision
    populateTypeInfo(result.addObject(), ADBDatatype.DAYTIMEDURATION, 32, null, 0, 0, false, null, null, int16Ser,
            int32Ser, stringSer); // TODO:precision
    populateTypeInfo(result.addObject(), ADBDatatype.DURATION, 32, null, 0, 0, false, null, null, int16Ser,
            int32Ser, stringSer); // TODO:precision
    populateTypeInfo(result.addObject(), ADBDatatype.STRING, 32767, null, null, null, true, "'", "'", int16Ser,
            int32Ser, stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.ARRAY, 32767, null, 0, 0, false, null, null, int16Ser,
            int32Ser, stringSer);
    populateTypeInfo(result.addObject(), ADBDatatype.OBJECT, 32767, null, 0, 0, false, null, null, int16Ser,
            int32Ser, stringSer);
    return createSystemResultSet(columns, result);
}
/**
 * Fills one getTypeInfo() row into {@code typeInfo}. All values are pre-serialized into the ADM
 * wire format via the supplied serializers so they can be replayed through the normal result-set
 * deserialization path.
 *
 * @param typeInfo       target JSON object (one result row)
 * @param type           the datatype being described
 * @param precision      maximum precision reported for the type
 * @param precisionRadix radix of the precision, or {@code null} to default to 10
 * @param minScale       minimum scale, or {@code null} for none
 * @param maxScale       maximum scale, or {@code null} for none
 * @param searchable     {@code null} = not searchable, {@code true} = fully searchable,
 *                       {@code false} = basic WHERE support only
 * @param literalPrefix  literal quoting prefix, or {@code null}
 * @param literalSuffix  literal quoting suffix, or {@code null}
 */
protected void populateTypeInfo(ObjectNode typeInfo, ADBDatatype type, int precision, Integer precisionRadix,
        Integer minScale, Integer maxScale, Boolean searchable, String literalPrefix, String literalSuffix,
        ADBPreparedStatement.AbstractValueSerializer int16Ser,
        ADBPreparedStatement.AbstractValueSerializer int32Ser,
        ADBPreparedStatement.AbstractValueSerializer stringSer) {
    typeInfo.put("TYPE_NAME", stringSer.serializeToString(type.getTypeName()));
    typeInfo.put("DATA_TYPE", int32Ser.serializeToString(type.getJdbcType().getVendorTypeNumber()));
    typeInfo.put("PRECISION", int32Ser.serializeToString(precision));
    typeInfo.put("LITERAL_PREFIX", literalPrefix != null ? stringSer.serializeToString(literalPrefix) : null);
    typeInfo.put("LITERAL_SUFFIX", literalSuffix != null ? stringSer.serializeToString(literalSuffix) : null);
    typeInfo.putNull("CREATE_PARAMS");
    // All types are reported as nullable.
    typeInfo.put("NULLABLE", int16Ser.serializeToString((short) DatabaseMetaData.typeNullable));
    typeInfo.put("CASE_SENSITIVE", false);
    typeInfo.put("SEARCHABLE",
            int16Ser.serializeToString((short) (searchable == null ? DatabaseMetaData.typePredNone
                    : searchable ? DatabaseMetaData.typeSearchable : DatabaseMetaData.typePredBasic)));
    typeInfo.put("UNSIGNED_ATTRIBUTE", false);
    typeInfo.put("FIXED_PREC_SCALE", false);
    typeInfo.putNull("AUTO_INCREMENT");
    typeInfo.putNull("LOCAL_TYPE_NAME");
    typeInfo.put("MINIMUM_SCALE", minScale != null ? int16Ser.serializeToString(minScale.shortValue()) : null);
    typeInfo.put("MAXIMUM_SCALE", maxScale != null ? int16Ser.serializeToString(maxScale.shortValue()) : null);
    // SQL_DATA_TYPE carries the server's internal type tag rather than a JDBC code.
    typeInfo.put("SQL_DATA_TYPE", int32Ser.serializeToString(type.getTypeTag()));
    typeInfo.putNull("SQL_DATETIME_SUB");
    typeInfo.put("NUM_PREC_RADIX", int32Ser.serializeToString(precisionRadix != null ? precisionRadix : 10));
}
/**
 * Returns an empty result set for metadata calls that this driver does not populate.
 *
 * @throws SQLException if the statement is closed
 */
protected ADBResultSet executeEmptyResultQuery() throws SQLException {
    checkClosed();
    return createEmptyResultSet();
}
/**
 * Metadata result sets are not owned by a caller-visible statement, so
 * {@code ResultSet.getStatement()} reports {@code null} for them.
 */
@Override
protected ADBStatement getResultSetStatement(ADBResultSet rs) {
    return null;
}
/**
 * Overrides the base submit options to force SQL++ mode for internally generated metadata
 * queries, regardless of the connection's SQL-compat setting.
 */
@Override
protected ADBProtocolBase.SubmitStatementOptions createSubmitStatementOptions() {
    ADBProtocolBase.SubmitStatementOptions options = super.createSubmitStatementOptions();
    // Metadata queries are always executed in SQL++ mode
    options.sqlCompatMode = false;
    return options;
}
/**
 * Returns the TABLE_TYPE term used for datasets: the plain table term when the dataset is
 * tabular, otherwise the schemaless-qualified variant.
 */
protected String getDatasetTerm(boolean tabular) {
    if (tabular) {
        return TABLE;
    }
    return SCHEMALESS + " " + TABLE;
}
/**
 * Returns the TABLE_TYPE term used for views: the plain view term when the view is tabular,
 * otherwise the schemaless-qualified variant.
 */
protected String getViewTerm(boolean tabular) {
    if (tabular) {
        return VIEW;
    }
    return SCHEMALESS + " " + VIEW;
}
}
| 6,053 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBStatement.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.io.IOException;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.BeanDescription;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializationConfig;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.ser.BeanSerializerModifier;
public class ADBStatement extends ADBWrapperSupport implements java.sql.Statement {
// Extra classes treated as atomic values by setObject() (calendar-based temporal wrappers).
static final List<Class<?>> SET_OBJECT_ATOMIC_EXTRA =
        Arrays.asList(SqlCalendarDate.class, SqlCalendarTime.class, SqlCalendarTimestamp.class);
// Classes treated as non-atomic (structured) values by setObject().
static final List<Class<?>> SET_OBJECT_NON_ATOMIC = Arrays.asList(Object[].class, Collection.class, Map.class);
// Maps a Java class to the serializer that renders it in ADM wire format.
static final Map<Class<?>, AbstractValueSerializer> SERIALIZER_MAP = createSerializerMap();
// Owning connection; never null.
protected final ADBConnection connection;
// Close state; flipped exactly once (see closeImpl).
protected final AtomicBoolean closed = new AtomicBoolean(false);
private volatile boolean closeOnCompletion;
// JDBC query timeout in seconds; 0 = no limit.
protected int queryTimeoutSeconds;
// Row cap applied to fetched result sets; 0 = unlimited.
protected long maxRows;
// Identifies the in-flight execution so cancel() can target it; regenerated per execution.
private volatile UUID executionId;
// common result fields
protected int updateCount = -1;
protected List<ADBProtocolBase.QueryServiceResponse.Message> warnings;
// executeQuery() result fields
protected final ConcurrentLinkedQueue<ADBResultSet> resultSetsWithResources;
protected final ConcurrentLinkedQueue<WeakReference<ADBResultSet>> resultSetsWithoutResources;
// execute() result field
protected ADBProtocolBase.SubmitStatementOptions executeStmtOptions;
protected ADBProtocolBase.QueryServiceResponse executeResponse;
protected ADBResultSet executeResultSet;
// Lifecycle
/**
 * Creates a statement bound to the given connection and seeds the first execution id.
 *
 * @param connection owning connection; must not be null
 */
public ADBStatement(ADBConnection connection) {
    this.connection = Objects.requireNonNull(connection);
    this.resultSetsWithResources = new ConcurrentLinkedQueue<>();
    this.resultSetsWithoutResources = new ConcurrentLinkedQueue<>();
    resetExecutionId();
}
/** Closes this statement, its registered result sets, and deregisters it from the connection. */
@Override
public void close() throws SQLException {
    closeImpl(true, true);
}
/**
 * Shared close logic. Moves the statement to the closed state exactly once; repeated calls are
 * no-ops. Optionally closes registered result sets, and — in a finally block so it happens even
 * if closing a result set throws — optionally deregisters this statement from the connection.
 *
 * @param closeResultSets  whether to close all result sets registered with this statement
 * @param notifyConnection whether to remove this statement from the connection's registry
 */
protected void closeImpl(boolean closeResultSets, boolean notifyConnection) throws SQLException {
    if (closed.getAndSet(true)) {
        // A previous call already closed us.
        return;
    }
    try {
        if (closeResultSets) {
            closeRegisteredResultSets();
        }
    } finally {
        if (notifyConnection) {
            connection.deregisterStatement(this);
        }
    }
}
/** Marks this statement to be closed when its dependent result sets are closed (JDBC 4.1). */
@Override
public void closeOnCompletion() throws SQLException {
    checkClosed();
    closeOnCompletion = true;
}

/** Reports whether close-on-completion has been requested. */
@Override
public boolean isCloseOnCompletion() throws SQLException {
    checkClosed();
    return closeOnCompletion;
}

/** Reports the closed state; never throws (per the JDBC contract for isClosed). */
@Override
public boolean isClosed() {
    return closed.get();
}

/**
 * Guard used by every public method: fails with the standard "object closed" error once the
 * statement has been closed.
 */
protected void checkClosed() throws SQLException {
    if (isClosed()) {
        throw getErrorReporter().errorObjectClosed(Statement.class);
    }
}
// Execution
/**
 * Executes a query (SELECT-category) statement with no positional arguments.
 *
 * @throws SQLException if the statement is closed, the SQL is not a query, or execution fails
 */
@Override
public ADBResultSet executeQuery(String sql) throws SQLException {
    checkClosed();
    return executeQueryImpl(sql, null);
}
/**
 * Submits a query-category statement and fetches its result set. The execution id is always
 * regenerated afterwards so a later cancel() cannot target a finished execution.
 *
 * @param sql  statement text
 * @param args positional arguments, or {@code null} for none
 * @return the fetched result set
 * @throws SQLException if the server classifies the statement as non-query, or execution fails
 */
protected ADBResultSet executeQueryImpl(String sql, List<?> args) throws SQLException {
    // note: we're not assigning executeResponse field at this method
    try {
        ADBProtocolBase.SubmitStatementOptions stmtOptions = createSubmitStatementOptions();
        stmtOptions.executionId = executionId;
        // executeQuery() must never perform updates; ask the server to enforce read-only.
        stmtOptions.forceReadOnly = true;
        ADBProtocolBase.QueryServiceResponse response = connection.protocol.submitStatement(sql, args, stmtOptions);
        boolean isQuery = connection.protocol.isStatementCategory(response,
                ADBProtocolBase.QueryServiceResponse.StatementCategory.QUERY);
        if (!isQuery) {
            throw getErrorReporter().errorInvalidStatementCategory();
        }
        warnings = connection.protocol.getWarningIfExists(response);
        updateCount = -1;
        return fetchResultSet(response, stmtOptions);
    } finally {
        resetExecutionId();
    }
}
/** Executes a non-query statement, returning the update count as a long (JDBC 4.2). */
@Override
public long executeLargeUpdate(String sql) throws SQLException {
    checkClosed();
    return executeUpdateImpl(sql, null);
}

/** Executes a non-query statement, returning the update count. */
@Override
public int executeUpdate(String sql) throws SQLException {
    checkClosed();
    return executeUpdateImpl(sql, null);
}

// Auto-generated-key and column-targeted update variants are not supported by this driver.

@Override
public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "executeLargeUpdate");
}

@Override
public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "executeUpdate");
}

@Override
public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "executeLargeUpdate");
}

@Override
public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "executeUpdate");
}

@Override
public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "executeLargeUpdate");
}

@Override
public int executeUpdate(String sql, String[] columnNames) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "executeUpdate");
}
/**
 * Submits a non-query statement and records its update count. The execution id is regenerated
 * in the finally block so a later cancel() cannot target a finished execution.
 *
 * @param sql  statement text
 * @param args positional arguments, or {@code null} for none
 * @return the server-reported update count
 * @throws SQLException if the server classifies the statement as a query, or execution fails
 */
protected int executeUpdateImpl(String sql, List<Object> args) throws SQLException {
    try {
        ADBProtocolBase.SubmitStatementOptions stmtOptions = createSubmitStatementOptions();
        stmtOptions.executionId = executionId;
        ADBProtocolBase.QueryServiceResponse response = connection.protocol.submitStatement(sql, args, stmtOptions);
        boolean isQuery = connection.protocol.isStatementCategory(response,
                ADBProtocolBase.QueryServiceResponse.StatementCategory.QUERY);
        // TODO: remove result set on the server (both query and update returning cases)
        if (isQuery) {
            throw getErrorReporter().errorInvalidStatementCategory();
        }
        warnings = connection.protocol.getWarningIfExists(response);
        updateCount = connection.protocol.getUpdateCount(response);
        return updateCount;
    } finally {
        resetExecutionId();
    }
}
/**
 * Executes a statement whose category (query vs. update) is not known in advance.
 *
 * @return {@code true} if the statement produced a result set (retrieve via getResultSet())
 */
@Override
public boolean execute(String sql) throws SQLException {
    checkClosed();
    return executeImpl(sql, null);
}

// Auto-generated-key and column-targeted execute variants are not supported by this driver.

@Override
public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "execute");
}

@Override
public boolean execute(String sql, int[] columnIndexes) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "execute");
}

@Override
public boolean execute(String sql, String[] columnNames) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "execute");
}
/**
 * Implements execute(): submits the statement and, depending on the server-reported category,
 * either stashes the response for a later getResultSet() call (query) or records the update
 * count (non-query). The execution id is regenerated in the finally block.
 *
 * @return {@code true} when the statement is a query and a result set is pending
 */
protected boolean executeImpl(String sql, List<Object> args) throws SQLException {
    try {
        ADBProtocolBase.SubmitStatementOptions stmtOptions = createSubmitStatementOptions();
        stmtOptions.executionId = executionId;
        ADBProtocolBase.QueryServiceResponse response = connection.protocol.submitStatement(sql, args, stmtOptions);
        warnings = connection.protocol.getWarningIfExists(response);
        // Options are kept so getResultSet() can fetch the pending result with the same settings.
        executeStmtOptions = stmtOptions;
        boolean isQuery = connection.protocol.isStatementCategory(response,
                ADBProtocolBase.QueryServiceResponse.StatementCategory.QUERY);
        if (isQuery) {
            updateCount = -1;
            executeResponse = response;
            return true;
        } else {
            updateCount = connection.protocol.getUpdateCount(response);
            executeResponse = null;
            return false;
        }
    } finally {
        resetExecutionId();
    }
}
/** Requests server-side cancellation of the execution identified by the current execution id. */
@Override
public void cancel() throws SQLException {
    checkClosed();
    connection.protocol.cancelRunningStatement(executionId);
}

/** Returns the configured query timeout in seconds (0 = no limit). */
@Override
public int getQueryTimeout() throws SQLException {
    checkClosed();
    return queryTimeoutSeconds;
}

/**
 * Sets the query timeout in seconds; 0 disables the timeout.
 *
 * @throws SQLException if the value is negative or the statement is closed
 */
@Override
public void setQueryTimeout(int timeoutSeconds) throws SQLException {
    checkClosed();
    if (timeoutSeconds < 0) {
        throw getErrorReporter().errorParameterValueNotSupported("timeoutSeconds");
    }
    queryTimeoutSeconds = timeoutSeconds;
}

/** Escape processing is accepted but has no effect in this driver. */
@Override
public void setEscapeProcessing(boolean enable) throws SQLException {
    checkClosed();
}

// Generates a fresh id before/after each execution so cancel() only targets live work.
private void resetExecutionId() {
    executionId = UUID.randomUUID();
}
/**
 * Builds the per-execution submit options from this statement's and the connection's settings
 * (active dataverse, SQL-compat mode, timeout).
 */
protected ADBProtocolBase.SubmitStatementOptions createSubmitStatementOptions() {
    ADBProtocolBase.SubmitStatementOptions stmtOptions = connection.protocol.createSubmitStatementOptions();
    stmtOptions.dataverseName = connection.getDataverseCanonicalName();
    stmtOptions.sqlCompatMode = connection.sqlCompatMode;
    stmtOptions.timeoutSeconds = queryTimeoutSeconds;
    return stmtOptions;
}
// Batch execution
// Batch execution is not supported by this driver; all batch methods fail uniformly.

@Override
public long[] executeLargeBatch() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "executeLargeBatch");
}

@Override
public int[] executeBatch() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "executeBatch");
}

@Override
public void addBatch(String sql) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "addBatch");
}

@Override
public void clearBatch() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "clearBatch");
}
// Result access
/**
 * Fetches the result set pending from a prior execute() call. Returns {@code null} when no
 * query response is pending. The pending response/options are consumed (cleared) so a second
 * call returns {@code null}.
 */
@Override
public ADBResultSet getResultSet() throws SQLException {
    checkClosed();
    ADBProtocolBase.QueryServiceResponse response = executeResponse;
    if (response == null) {
        return null;
    }
    ADBResultSet rs = fetchResultSet(response, executeStmtOptions);
    // Remember the result set so getMoreResults() can close it; clear the pending state.
    executeResultSet = rs;
    executeResponse = null;
    executeStmtOptions = null;
    return rs;
}
/** Equivalent to {@code getMoreResults(CLOSE_ALL_RESULTS)}. */
@Override
public boolean getMoreResults() throws SQLException {
    return getMoreResults(Statement.CLOSE_ALL_RESULTS);
}

/**
 * Advances past the current result. This driver never produces multiple results, so the return
 * is always {@code false}; the current result set is closed unless the caller passed
 * {@code KEEP_CURRENT_RESULT}.
 */
@Override
public boolean getMoreResults(int current) throws SQLException {
    checkClosed();
    ADBResultSet rs = executeResultSet;
    executeResultSet = null;
    if (rs != null && current != Statement.KEEP_CURRENT_RESULT) {
        rs.closeImpl(true);
    }
    return false;
}
/** All result sets produced by this driver are forward-only. */
@Override
public int getResultSetType() throws SQLException {
    checkClosed();
    return ResultSet.TYPE_FORWARD_ONLY;
}

/** All result sets produced by this driver are read-only. */
@Override
public int getResultSetConcurrency() throws SQLException {
    checkClosed();
    return ResultSet.CONCUR_READ_ONLY;
}

/** Holdability is fixed driver-wide; see ADBResultSet.RESULT_SET_HOLDABILITY. */
@Override
public int getResultSetHoldability() throws SQLException {
    checkClosed();
    return ADBResultSet.RESULT_SET_HOLDABILITY;
}

/** Generated keys are not produced; per the JDBC contract this returns an empty result set. */
@Override
public ResultSet getGeneratedKeys() throws SQLException {
    checkClosed();
    return createEmptyResultSet();
}

/** Update count of the most recent execution, or -1 if the last result was a result set. */
@Override
public long getLargeUpdateCount() throws SQLException {
    checkClosed();
    return updateCount;
}

// Narrowing cast is safe: updateCount is stored as an int.
@Override
public int getUpdateCount() throws SQLException {
    return (int) getLargeUpdateCount();
}
// ResultSet lifecycle
/**
 * Builds an ADBResultSet from a query-service response. EXPLAIN-only statements are
 * materialized from the pre-fetched explain text; regular queries stream rows via a
 * JsonParser that the result set takes ownership of.
 */
protected ADBResultSet fetchResultSet(ADBProtocolBase.QueryServiceResponse execResponse,
        ADBProtocolBase.SubmitStatementOptions stmtOptions) throws SQLException {
    List<ADBColumn> columns = connection.protocol.getColumns(execResponse);
    // Fix: the guard used to check Level.FINER while the message was logged at
    // Level.FINE, so the message was emitted at FINE but only when FINER was enabled.
    // Guard and log level now agree.
    if (getLogger().isLoggable(Level.FINE)) {
        getLogger().log(Level.FINE, "result schema " + columns);
    }
    if (connection.protocol.isExplainOnly(execResponse)) {
        AbstractValueSerializer stringSer = getADMFormatSerializer(String.class);
        ArrayNode explainResult =
                connection.protocol.fetchExplainOnlyResult(execResponse, stringSer::serializeToString);
        return createSystemResultSet(columns, explainResult);
    } else {
        JsonParser rowParser = connection.protocol.fetchResult(execResponse, stmtOptions);
        // rowParserOwnsResources=true: closing the result set releases the parser
        return createResultSetImpl(columns, rowParser, true, maxRows);
    }
}

/** Wraps an in-memory JSON array as a (non-resource-owning, unlimited) result set. */
protected ADBResultSet createSystemResultSet(List<ADBColumn> columns, ArrayNode values) {
    JsonParser rowParser = connection.protocol.getDriverContext().getGenericObjectReader().treeAsTokens(values);
    return createResultSetImpl(columns, rowParser, false, 0);
}

/** Creates a result set with no columns and no rows (e.g. for getGeneratedKeys()). */
protected ADBResultSet createEmptyResultSet() {
    ArrayNode empty = (ArrayNode) connection.protocol.getDriverContext().getGenericObjectReader().createArrayNode();
    return createSystemResultSet(Collections.emptyList(), empty);
}

/**
 * Common result-set construction path; every created result set is registered with
 * this statement so it can be tracked for closeOnCompletion and resource cleanup.
 */
protected ADBResultSet createResultSetImpl(List<ADBColumn> columns, JsonParser rowParser,
        boolean rowParserOwnsResources, long maxRows) {
    ADBResultSetMetaData metadata = new ADBResultSetMetaData(this, columns);
    ADBResultSet rs = new ADBResultSet(metadata, rowParser, rowParserOwnsResources, maxRows);
    registerResultSet(rs);
    return rs;
}
/**
 * Tracks a newly created result set. Resource-owning result sets are held strongly
 * (they must be closed explicitly); others are held via weak references so GC can
 * reclaim them, with stale references purged opportunistically on each add.
 */
protected void registerResultSet(ADBResultSet rs) {
    if (rs.rowParserOwnsResources) {
        resultSetsWithResources.add(rs);
    } else {
        resultSetsWithoutResources.removeIf(ADBStatement::isEmptyReference);
        resultSetsWithoutResources.add(new WeakReference<>(rs));
    }
}

/**
 * Removes a result set from tracking (called when it is closed). If closeOnCompletion
 * was requested and this was the last tracked result set, the statement closes itself.
 */
protected void deregisterResultSet(ADBResultSet rs) {
    if (rs.rowParserOwnsResources) {
        resultSetsWithResources.remove(rs);
    } else {
        // drop both the matching reference and any references already cleared by GC
        resultSetsWithoutResources.removeIf(ref -> {
            ADBResultSet refrs = ref.get();
            return refrs == null || refrs == rs;
        });
    }
    if (closeOnCompletion && resultSetsWithResources.isEmpty() && resultSetsWithoutResources.isEmpty()) {
        try {
            closeImpl(false, true);
        } catch (SQLException e) {
            // this exception shouldn't happen because there are no result sets to close
            if (getLogger().isLoggable(Level.FINE)) {
                getLogger().log(Level.FINE, e.getMessage(), e);
            }
        }
    }
}
/**
 * Closes every tracked result set (both strongly and weakly held). All queues are
 * drained even if individual closes fail; the most recent failure is thrown, with
 * earlier failures attached as suppressed exceptions.
 */
protected void closeRegisteredResultSets() throws SQLException {
    SQLException err = null;
    try {
        closedRegisteredResultSetsImpl(resultSetsWithResources, Function.identity());
    } catch (SQLException e) {
        err = e;
    }
    try {
        closedRegisteredResultSetsImpl(resultSetsWithoutResources, Reference::get);
    } catch (SQLException e) {
        if (err != null) {
            e.addSuppressed(err);
        }
        err = e;
    }
    if (err != null) {
        throw err;
    }
}

// NOTE(review): method name reads "closed..." where "close..." was likely intended;
// left as-is because it is protected and may be referenced elsewhere.
/**
 * Drains the given queue, closing each live result set. rsAccessor unwraps queue
 * items (identity for direct entries, Reference::get for weak entries, which may
 * yield null for collected result sets). Last failure wins; earlier ones suppressed.
 */
protected <T> void closedRegisteredResultSetsImpl(Queue<T> queue, Function<T, ADBResultSet> rsAccessor)
        throws SQLException {
    SQLException err = null;
    T item;
    while ((item = queue.poll()) != null) {
        ADBResultSet rs = rsAccessor.apply(item);
        if (rs != null) {
            try {
                rs.closeImpl(false);
            } catch (SQLException e) {
                if (err != null) {
                    e.addSuppressed(err);
                }
                err = e;
            }
        }
    }
    if (err != null) {
        throw err;
    }
}

/** True when the weak reference's result set has been garbage-collected. */
private static boolean isEmptyReference(Reference<ADBResultSet> ref) {
    return ref.get() == null;
}
// Result control
/** Sets the row limit applied to subsequently created result sets; 0 means unlimited. */
@Override
public void setLargeMaxRows(long maxRows) throws SQLException {
    checkClosed();
    if (maxRows < 0) {
        throw getErrorReporter().errorParameterValueNotSupported("maxRows");
    }
    this.maxRows = maxRows;
}

@Override
public void setMaxRows(int maxRows) throws SQLException {
    setLargeMaxRows(maxRows);
}

@Override
public long getLargeMaxRows() throws SQLException {
    checkClosed();
    return maxRows;
}

/**
 * int-typed view of the limit. Saturates at Integer.MAX_VALUE: the limit may have
 * been set through setLargeMaxRows() to a value that does not fit in an int, and a
 * plain (int) cast would wrap to a negative number.
 */
@Override
public int getMaxRows() throws SQLException {
    return (int) Math.min(getLargeMaxRows(), Integer.MAX_VALUE);
}
/** Named cursors (positioned update/delete) are not supported; always throws. */
@Override
public void setCursorName(String name) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "setCursorName");
}
// Unsupported hints (ignored)
/** Results are always delivered forward-only. */
@Override
public int getFetchDirection() throws SQLException {
    checkClosed();
    return ResultSet.FETCH_FORWARD;
}

/**
 * Validates the direction constant but otherwise ignores it: this driver always
 * fetches forward. Invalid constants are rejected, matching the JDBC contract.
 */
@Override
public void setFetchDirection(int direction) throws SQLException {
    checkClosed();
    boolean recognized = direction == ResultSet.FETCH_FORWARD
            || direction == ResultSet.FETCH_REVERSE
            || direction == ResultSet.FETCH_UNKNOWN;
    if (!recognized) {
        throw getErrorReporter().errorParameterValueNotSupported("direction");
    }
    // recognized values are accepted as a hint and ignored
}

/** Fetch size is fixed; rows are streamed one at a time. */
@Override
public int getFetchSize() throws SQLException {
    checkClosed();
    return 1;
}
/**
 * Accepts (and ignores) a non-negative fetch-size hint; negative values are rejected.
 */
@Override
public void setFetchSize(int rows) throws SQLException {
    checkClosed();
    if (rows < 0) {
        // Consistency fix: use errorParameterValueNotSupported, matching the identical
        // negative-value checks in setLargeMaxRows() and setFetchDirection(); the
        // previous errorParameterNotSupported wrongly implied the parameter itself
        // is unsupported rather than its value.
        throw getErrorReporter().errorParameterValueNotSupported("rows");
    }
}

/** No field-size limit is enforced; 0 means unlimited per the JDBC contract. */
@Override
public int getMaxFieldSize() throws SQLException {
    checkClosed();
    return 0;
}

/** Changing the field-size limit is not supported; always throws. */
@Override
public void setMaxFieldSize(int maxFieldSize) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Statement.class, "setMaxFieldSize");
}

/** Statements are never poolable. */
@Override
public boolean isPoolable() throws SQLException {
    checkClosed();
    return false;
}

/** Poolability hint is accepted and ignored. */
@Override
public void setPoolable(boolean poolable) throws SQLException {
    checkClosed();
}
// Errors and warnings
/** Converts the accumulated protocol warnings (if any) into an SQLWarning chain. */
@Override
public SQLWarning getWarnings() throws SQLException {
    checkClosed();
    return warnings != null ? connection.protocol.createSQLWarning(warnings) : null;
}

/** Discards any accumulated warnings. */
@Override
public void clearWarnings() throws SQLException {
    checkClosed();
    warnings = null;
}

// Error reporting and logging are delegated to the owning connection.
@Override
protected ADBErrorReporter getErrorReporter() {
    return connection.getErrorReporter();
}

protected Logger getLogger() {
    return connection.getLogger();
}

// Ownership
/** Returns the connection that created this statement. */
@Override
public Connection getConnection() throws SQLException {
    checkClosed();
    return connection;
}

// Package-private accessor: recovers the statement that produced a result set
// via its metadata back-pointer.
ADBStatement getResultSetStatement(ADBResultSet rs) {
    return rs.metadata.statement;
}
// Serialization
/** Installs the ADM-format serializer overrides on the given Jackson module. */
protected static void configureADMFormatSerialization(SimpleModule serdeModule) {
    serdeModule.setSerializerModifier(createADMFormatSerializerModifier());
}

/** Looks up the custom ADM serializer for a Java class; null if the class has none. */
protected static AbstractValueSerializer getADMFormatSerializer(Class<?> cls) {
    return SERIALIZER_MAP.get(cls);
}

/**
 * Creates a Jackson modifier that substitutes the driver's ADM tagged-value
 * serializers for setObject-compatible classes, leaving other classes to Jackson's
 * defaults.
 */
protected static BeanSerializerModifier createADMFormatSerializerModifier() {
    return new BeanSerializerModifier() {
        @Override
        public JsonSerializer<?> modifySerializer(SerializationConfig config, BeanDescription beanDesc,
                JsonSerializer<?> serializer) {
            Class<?> cls = beanDesc.getClassInfo().getAnnotated();
            if (isSetObjectCompatible(cls)) {
                AbstractValueSerializer ser = getADMFormatSerializer(cls);
                return ser != null ? ser : super.modifySerializer(config, beanDesc, serializer);
            } else {
                // NOTE(review): returning null for non-compatible classes — presumably such
                // classes are never serialized on this path; confirm against the callers
                // of configureADMFormatSerialization.
                return null;
            }
        }
    };
}

/**
 * True when instances of cls may be bound via PreparedStatement.setObject():
 * either an atomic type known to the row store, an extra atomic type, or a
 * subtype of one of the supported non-atomic (calendar wrapper) types.
 */
protected static boolean isSetObjectCompatible(Class<?> cls) {
    if (ADBRowStore.OBJECT_ACCESSORS_ATOMIC.containsKey(cls) || SET_OBJECT_ATOMIC_EXTRA.contains(cls)) {
        return true;
    }
    for (Class<?> aClass : SET_OBJECT_NON_ATOMIC) {
        if (aClass.isAssignableFrom(cls)) {
            return true;
        }
    }
    return false;
}
/**
 * Builds the class-to-serializer table used for encoding setObject() parameter
 * values in ADM format. Covers the tagged numeric, temporal, and string types;
 * Boolean and Long need no entry (Jackson emits them natively).
 */
protected static Map<Class<?>, AbstractValueSerializer> createSerializerMap() {
    Map<Class<?>, AbstractValueSerializer> serializerMap = new HashMap<>();
    registerSerializer(serializerMap, createGenericSerializer(Byte.class, ADBDatatype.TINYINT));
    registerSerializer(serializerMap, createGenericSerializer(Short.class, ADBDatatype.SMALLINT));
    registerSerializer(serializerMap, createGenericSerializer(Integer.class, ADBDatatype.INTEGER));
    registerSerializer(serializerMap, createGenericSerializer(UUID.class, ADBDatatype.UUID));
    // Long is serialized as JSON number by Jackson
    registerSerializer(serializerMap, createFloatSerializer());
    registerSerializer(serializerMap, createDoubleSerializer());
    registerSerializer(serializerMap, createBigDecimalSerializer());
    registerSerializer(serializerMap, createStringSerializer());
    registerSerializer(serializerMap, createSqlDateSerializer());
    registerSerializer(serializerMap, createSqlDateWithCalendarSerializer());
    registerSerializer(serializerMap, createLocalDateSerializer());
    registerSerializer(serializerMap, createSqlTimeSerializer());
    registerSerializer(serializerMap, createSqlCalendarTimeSerializer());
    registerSerializer(serializerMap, createLocalTimeSerializer());
    registerSerializer(serializerMap, createSqlTimestampSerializer());
    registerSerializer(serializerMap, createSqlCalendarTimestampSerializer());
    registerSerializer(serializerMap, createLocalDateTimeSerializer());
    registerSerializer(serializerMap, createPeriodSerializer());
    registerSerializer(serializerMap, createDurationSerializer());
    return serializerMap;
}

/** Keys the serializer by its own declared Java type. */
protected static void registerSerializer(Map<Class<?>, AbstractValueSerializer> map,
        AbstractValueSerializer serializer) {
    map.put(serializer.getJavaType(), serializer);
}
/**
 * Generic tagged serializer for values whose toString() form is already the
 * correct ADM payload (Byte, Short, Integer, UUID).
 * Fix: the second parameter was named "ADBDatatype", shadowing its own type name;
 * renamed to adbType (parameter names are not part of the Java call interface).
 */
protected static ATaggedValueSerializer createGenericSerializer(Class<?> javaType, ADBDatatype adbType) {
    return new ATaggedValueSerializer(javaType, adbType) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            out.append(value);
        }
    };
}
/**
 * Serializer for String values: emits the row store's text delimiter followed by
 * the raw text (strings are not type-tagged like the other atomic types).
 */
protected static AbstractValueSerializer createStringSerializer() {
    return new AbstractValueSerializer(java.lang.String.class) {
        @Override
        public void serialize(Object value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
            gen.writeString(serializeToString(value));
        }

        @Override
        protected String serializeToString(Object value) {
            return ADBRowStore.TEXT_DELIMITER + String.valueOf(value);
        }
    };
}
// Float is transported as its IEEE-754 bit pattern (widened to long) so the value
// round-trips exactly.
protected static ATaggedValueSerializer createFloatSerializer() {
    return new ATaggedValueSerializer(Float.class, ADBDatatype.FLOAT) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            int bits = Float.floatToIntBits((Float) value);
            out.append((long) bits);
        }
    };
}

// Double is transported as its IEEE-754 bit pattern for exact round-tripping.
protected static ATaggedValueSerializer createDoubleSerializer() {
    return new ATaggedValueSerializer(Double.class, ADBDatatype.DOUBLE) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long bits = Double.doubleToLongBits((Double) value);
            out.append(bits);
        }
    };
}

// BigDecimal is mapped to DOUBLE: values that exceed double precision lose
// precision here by design (the wire type has no exact-decimal representation).
protected static ATaggedValueSerializer createBigDecimalSerializer() {
    return new ATaggedValueSerializer(BigDecimal.class, ADBDatatype.DOUBLE) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long bits = Double.doubleToLongBits(((BigDecimal) value).doubleValue());
            out.append(bits);
        }
    };
}
/**
 * java.sql.Date -> ADM DATE (days since epoch), interpreted in the default time zone.
 * Fix: day computation now uses Math.floorDiv. TimeUnit.MILLISECONDS.toDays truncates
 * toward zero, which produced the wrong day (off by one) for instants before the
 * epoch, where the adjusted chronon is negative. The LocalDate serializer below was
 * already floor-correct via toEpochDay(); these two now agree with it.
 */
protected static ATaggedValueSerializer createSqlDateSerializer() {
    return new ATaggedValueSerializer(java.sql.Date.class, ADBDatatype.DATE) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long millis = ((Date) value).getTime();
            long millisAdjusted = getDatetimeChrononAdjusted(millis, TimeZone.getDefault());
            long days = Math.floorDiv(millisAdjusted, TimeUnit.DAYS.toMillis(1));
            out.append(days);
        }
    };
}

/** java.sql.Date with an explicit Calendar time zone -> ADM DATE; same floor semantics. */
protected static ATaggedValueSerializer createSqlDateWithCalendarSerializer() {
    return new ATaggedValueSerializer(SqlCalendarDate.class, ADBDatatype.DATE) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            SqlCalendarDate dateWithCalendar = (SqlCalendarDate) value;
            long millis = dateWithCalendar.date.getTime();
            long millisAdjusted = getDatetimeChrononAdjusted(millis, dateWithCalendar.timeZone);
            long days = Math.floorDiv(millisAdjusted, TimeUnit.DAYS.toMillis(1));
            out.append(days);
        }
    };
}

/** java.time.LocalDate -> ADM DATE; toEpochDay() already floors for pre-epoch dates. */
protected static ATaggedValueSerializer createLocalDateSerializer() {
    return new ATaggedValueSerializer(java.time.LocalDate.class, ADBDatatype.DATE) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long days = ((LocalDate) value).toEpochDay();
            out.append(days);
        }
    };
}
/**
 * java.sql.Time -> ADM TIME (milliseconds within the day), default time zone.
 * Fix: time-of-day is now computed with Math.floorMod. The previous expression
 * (millisAdjusted - DAYS.toMillis(MILLISECONDS.toDays(millisAdjusted))) relied on
 * truncating division and produced a negative "time of day" whenever the
 * zone-adjusted chronon was negative; floorMod always yields a value in
 * [0, 86_400_000).
 */
protected static ATaggedValueSerializer createSqlTimeSerializer() {
    return new ATaggedValueSerializer(java.sql.Time.class, ADBDatatype.TIME) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long millis = ((Time) value).getTime();
            long millisAdjusted = getDatetimeChrononAdjusted(millis, TimeZone.getDefault());
            long timeMillis = Math.floorMod(millisAdjusted, TimeUnit.DAYS.toMillis(1));
            out.append(timeMillis);
        }
    };
}

/** java.sql.Time with an explicit Calendar time zone -> ADM TIME; same floor semantics. */
protected static ATaggedValueSerializer createSqlCalendarTimeSerializer() {
    return new ATaggedValueSerializer(SqlCalendarTime.class, ADBDatatype.TIME) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            SqlCalendarTime timeWithCalendar = (SqlCalendarTime) value;
            long millis = timeWithCalendar.time.getTime();
            long millisAdjusted = getDatetimeChrononAdjusted(millis, timeWithCalendar.timeZone);
            long timeMillis = Math.floorMod(millisAdjusted, TimeUnit.DAYS.toMillis(1));
            out.append(timeMillis);
        }
    };
}

/** java.time.LocalTime -> ADM TIME; nano-of-day is already non-negative. */
protected static ATaggedValueSerializer createLocalTimeSerializer() {
    return new ATaggedValueSerializer(java.time.LocalTime.class, ADBDatatype.TIME) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long nanos = ((LocalTime) value).toNanoOfDay();
            long timeMillis = TimeUnit.NANOSECONDS.toMillis(nanos);
            out.append(timeMillis);
        }
    };
}
// java.sql.Timestamp -> ADM DATETIME: epoch millis shifted by the default zone offset.
// Sub-millisecond precision of the Timestamp (its nanos) is dropped.
protected static ATaggedValueSerializer createSqlTimestampSerializer() {
    return new ATaggedValueSerializer(java.sql.Timestamp.class, ADBDatatype.DATETIME) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long millis = ((Timestamp) value).getTime();
            long millisAdjusted = getDatetimeChrononAdjusted(millis, TimeZone.getDefault());
            out.append(millisAdjusted);
        }
    };
}

// Timestamp with an explicit Calendar time zone -> ADM DATETIME.
protected static ATaggedValueSerializer createSqlCalendarTimestampSerializer() {
    return new ATaggedValueSerializer(SqlCalendarTimestamp.class, ADBDatatype.DATETIME) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            SqlCalendarTimestamp timestampWithCalendar = (SqlCalendarTimestamp) value;
            long millis = timestampWithCalendar.timestamp.getTime();
            long millisAdjusted = getDatetimeChrononAdjusted(millis, timestampWithCalendar.timeZone);
            out.append(millisAdjusted);
        }
    };
}

// LocalDateTime -> ADM DATETIME: interpreted as a UTC wall-clock value, so no
// further zone adjustment is applied.
protected static ATaggedValueSerializer createLocalDateTimeSerializer() {
    return new ATaggedValueSerializer(java.time.LocalDateTime.class, ADBDatatype.DATETIME) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long millis = ((LocalDateTime) value).atZone(TZ_UTC).toInstant().toEpochMilli();
            out.append(millis);
        }
    };
}
// java.time.Period -> ADM YEARMONTHDURATION as a total month count; the day
// component of the Period is ignored (the ADM type carries only months).
protected static ATaggedValueSerializer createPeriodSerializer() {
    return new ATaggedValueSerializer(java.time.Period.class, ADBDatatype.YEARMONTHDURATION) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long months = ((Period) value).toTotalMonths();
            out.append(months);
        }
    };
}

// java.time.Duration -> ADM DAYTIMEDURATION in milliseconds; sub-millisecond
// precision is dropped.
protected static ATaggedValueSerializer createDurationSerializer() {
    return new ATaggedValueSerializer(java.time.Duration.class, ADBDatatype.DAYTIMEDURATION) {
        @Override
        protected void serializeNonTaggedValue(Object value, StringBuilder out) {
            long millis = ((Duration) value).toMillis();
            out.append(millis);
        }
    };
}
/**
 * Base class for Jackson serializers that encode a single Java type into the
 * driver's text wire format. Subclasses must provide the string encoding.
 */
protected static abstract class AbstractValueSerializer extends JsonSerializer<Object> {

    // The Java class this serializer handles; used as the key in SERIALIZER_MAP.
    protected final Class<?> javaType;

    protected AbstractValueSerializer(Class<?> javaType) {
        this.javaType = Objects.requireNonNull(javaType);
    }

    protected Class<?> getJavaType() {
        return javaType;
    }

    // Package-private so sibling code (e.g. fetchResultSet's method reference)
    // can invoke the encoding directly without going through Jackson.
    abstract String serializeToString(Object value);
}
/**
 * Serializer for values carried as "tagged" text: a two-hex-digit ADM type tag,
 * the row-store text delimiter, then the type-specific payload produced by
 * {@link #serializeNonTaggedValue}.
 */
protected static abstract class ATaggedValueSerializer extends AbstractValueSerializer {

    // Fix: declared final — this is a constant and was previously a mutable static field.
    protected static final ZoneId TZ_UTC = ZoneId.of("UTC");

    // ADM type whose tag prefixes every serialized value.
    protected final ADBDatatype adbType;

    protected ATaggedValueSerializer(Class<?> javaType, ADBDatatype adbType) {
        super(javaType);
        this.adbType = Objects.requireNonNull(adbType);
    }

    @Override
    public void serialize(Object value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
        gen.writeString(serializeToString(value)); // TODO:optimize?
    }

    protected final String serializeToString(Object value) {
        StringBuilder textBuilder = new StringBuilder(64); // TODO:optimize?
        printByteAsHex(adbType.getTypeTag(), textBuilder);
        textBuilder.append(ADBRowStore.TEXT_DELIMITER);
        serializeNonTaggedValue(value, textBuilder);
        return textBuilder.toString();
    }

    /** Appends the payload (everything after the tag and delimiter) for this value. */
    protected abstract void serializeNonTaggedValue(Object value, StringBuilder out);

    // Writes b as exactly two uppercase hex digits.
    private static void printByteAsHex(byte b, StringBuilder out) {
        out.append(hex((b >>> 4) & 0x0f));
        out.append(hex(b & 0x0f));
    }

    private static char hex(int i) {
        return (char) (i + (i < 10 ? '0' : ('A' - 10)));
    }

    /** Shifts an epoch-millis chronon by the zone's offset at that instant. */
    protected long getDatetimeChrononAdjusted(long datetimeChrononInMillis, TimeZone tz) {
        int tzOffset = tz.getOffset(datetimeChrononInMillis);
        return datetimeChrononInMillis + tzOffset;
    }
}
/**
 * Wrappers pairing a java.sql temporal value with the Calendar-supplied time zone
 * from the setDate/setTime/setTimestamp(..., Calendar) overloads, so the
 * serializers can apply the caller's zone instead of the JVM default.
 */
protected static abstract class AbstractSqlCalendarDateTime {
    final TimeZone timeZone;

    AbstractSqlCalendarDateTime(TimeZone timeZone) {
        this.timeZone = timeZone;
    }
}

/** java.sql.Date plus its caller-supplied time zone. */
protected static final class SqlCalendarDate extends AbstractSqlCalendarDateTime {
    final Date date;

    SqlCalendarDate(Date date, TimeZone timeZone) {
        super(timeZone);
        this.date = date;
    }
}

/** java.sql.Time plus its caller-supplied time zone. */
protected static final class SqlCalendarTime extends AbstractSqlCalendarDateTime {
    final Time time;

    SqlCalendarTime(Time time, TimeZone timeZone) {
        super(timeZone);
        this.time = time;
    }
}

/** java.sql.Timestamp plus its caller-supplied time zone. */
protected static final class SqlCalendarTimestamp extends AbstractSqlCalendarDateTime {
    final Timestamp timestamp;

    SqlCalendarTimestamp(Timestamp timestamp, TimeZone timeZone) {
        super(timeZone);
        this.timestamp = timestamp;
    }
}
}
| 6,054 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBPreparedStatement.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.NClob;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLType;
import java.sql.SQLXML;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
public class ADBPreparedStatement extends ADBStatement implements PreparedStatement {
protected final String sql;
protected final List<Object> args;
protected final List<ADBColumn> resultColumns;
/**
 * Compiles the statement eagerly (compile-only submission) so that the parameter
 * count and, for queries, the result schema are known before the first execution.
 * Throws if the server rejects the statement at compile time.
 */
public ADBPreparedStatement(ADBConnection connection, String sql) throws SQLException {
    super(connection);
    ADBProtocolBase.SubmitStatementOptions stmtOptions = createSubmitStatementOptions();
    stmtOptions.compileOnly = true;
    stmtOptions.timeoutSeconds = 0; /* TODO:timeout */
    ADBProtocolBase.QueryServiceResponse response = connection.protocol.submitStatement(sql, null, stmtOptions);
    int parameterCount = connection.protocol.getStatementParameterCount(response);
    boolean isQuery = connection.protocol.isStatementCategory(response,
            ADBProtocolBase.QueryServiceResponse.StatementCategory.QUERY);
    // non-query statements report no result columns
    List<ADBColumn> columns = isQuery ? connection.protocol.getColumns(response) : Collections.emptyList();
    this.sql = sql;
    // fixed-size list: one slot per parameter marker, initially all null
    this.args = Arrays.asList(new Object[parameterCount]);
    this.resultColumns = columns;
}
// Metadata
/** Parameter metadata derived from the compile-time parameter count. */
@Override
public ParameterMetaData getParameterMetaData() throws SQLException {
    checkClosed();
    return new ADBParameterMetaData(this, args.size());
}

/** Result metadata from the compile-time schema (empty for non-query statements). */
@Override
public ResultSetMetaData getMetaData() throws SQLException {
    checkClosed();
    return new ADBResultSetMetaData(this, resultColumns);
}
// Execution
// --- Execution. The no-arg variants run the precompiled statement with the bound
// parameters; every String-taking variant is prohibited on PreparedStatement per
// the JDBC specification and throws. ---

@Override
public ResultSet executeQuery() throws SQLException {
    checkClosed();
    return executeQueryImpl(sql, args);
}

@Override
public ADBResultSet executeQuery(String sql) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeQuery");
}

// NOTE(review): unlike executeQuery(), the update/execute variants below do not call
// checkClosed() before delegating — presumably executeUpdateImpl/executeImpl perform
// the check internally; confirm in ADBStatement.
@Override
public long executeLargeUpdate() throws SQLException {
    return executeUpdateImpl(sql, args);
}

@Override
public int executeUpdate() throws SQLException {
    return executeUpdateImpl(sql, args);
}

@Override
public long executeLargeUpdate(String sql) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeLargeUpdate");
}

@Override
public int executeUpdate(String sql) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeUpdate");
}

@Override
public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeLargeUpdate");
}

@Override
public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeUpdate");
}

@Override
public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeLargeUpdate");
}

@Override
public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeUpdate");
}

@Override
public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeLargeUpdate");
}

@Override
public int executeUpdate(String sql, String[] columnNames) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "executeUpdate");
}

@Override
public boolean execute() throws SQLException {
    return executeImpl(sql, args);
}

@Override
public boolean execute(String sql) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "execute");
}

@Override
public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "execute");
}

@Override
public boolean execute(String sql, int[] columnIndexes) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "execute");
}

@Override
public boolean execute(String sql, String[] columnNames) throws SQLException {
    // Prohibited on PreparedStatement
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "execute");
}

/** Batch execution is not supported; always throws. */
@Override
public void addBatch() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "addBatch");
}

@Override
public void setEscapeProcessing(boolean enable) throws SQLException {
    checkClosed();
    // ignore as the statement has already been parsed
}
// Parameter bindings
/** Resets every bound parameter slot to null (the list itself is fixed-size). */
@Override
public void clearParameters() throws SQLException {
    checkClosed();
    Collections.fill(args, null);
}

/**
 * Maps a 1-based JDBC parameter index to a 0-based slot, rejecting out-of-range
 * indexes with the driver's standard parameter-value error.
 */
private int argIndex(int parameterIndex) throws SQLException {
    if (parameterIndex < 1 || parameterIndex > args.size()) {
        throw getErrorReporter().errorParameterValueNotSupported("parameterIndex");
    }
    return parameterIndex - 1;
}

/** Stores a value into the slot for the given 1-based parameter index. */
private void setArg(int parameterIndex, Object v) throws SQLException {
    args.set(argIndex(parameterIndex), v);
}
// Basic types
// --- Typed parameter setters. Values are stored as-is; the declared SQL type is
// ignored because serialization is driven by the Java class (see the ADM
// serializer map in ADBStatement). The Calendar overloads wrap the value with the
// caller's time zone so serialization uses it instead of the JVM default. ---

@Override
public void setNull(int parameterIndex, int sqlType) throws SQLException {
    checkClosed();
    setArg(parameterIndex, null);
}

@Override
public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException {
    checkClosed();
    setNull(parameterIndex, sqlType);
}

@Override
public void setBoolean(int parameterIndex, boolean v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setByte(int parameterIndex, byte v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setShort(int parameterIndex, short v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setInt(int parameterIndex, int v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setLong(int parameterIndex, long v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setFloat(int parameterIndex, float v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setDouble(int parameterIndex, double v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setBigDecimal(int parameterIndex, BigDecimal v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setString(int parameterIndex, String v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setNString(int parameterIndex, String v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setDate(int parameterIndex, java.sql.Date v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setDate(int parameterIndex, java.sql.Date v, Calendar cal) throws SQLException {
    checkClosed();
    setArg(parameterIndex, cal != null ? new SqlCalendarDate(v, cal.getTimeZone()) : v);
}

@Override
public void setTime(int parameterIndex, java.sql.Time v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setTime(int parameterIndex, java.sql.Time v, Calendar cal) throws SQLException {
    checkClosed();
    setArg(parameterIndex, cal != null ? new SqlCalendarTime(v, cal.getTimeZone()) : v);
}

@Override
public void setTimestamp(int parameterIndex, java.sql.Timestamp v) throws SQLException {
    checkClosed();
    setArg(parameterIndex, v);
}

@Override
public void setTimestamp(int parameterIndex, java.sql.Timestamp v, Calendar cal) throws SQLException {
    checkClosed();
    setArg(parameterIndex, cal != null ? new SqlCalendarTimestamp(v, cal.getTimeZone()) : v);
}
// Generic (setObject)
/**
 * Binds an arbitrary value. Only null and classes the driver can serialize
 * (see isSetObjectCompatible) are accepted; everything else is rejected.
 */
@Override
public void setObject(int parameterIndex, Object v) throws SQLException {
    checkClosed();
    boolean acceptable = v == null || isSetObjectCompatible(v.getClass());
    if (!acceptable) {
        throw getErrorReporter().errorParameterValueNotSupported("object");
    }
    setArg(parameterIndex, v);
}

/** The target SQL type is not used for conversion; the value is bound as-is. */
@Override
public void setObject(int parameterIndex, Object v, int targetSqlType) throws SQLException {
    setObject(parameterIndex, v);
}

@Override
public void setObject(int parameterIndex, Object v, SQLType targetSqlType) throws SQLException {
    if (targetSqlType != null) {
        setObject(parameterIndex, v, targetSqlType.getVendorTypeNumber());
    } else {
        setObject(parameterIndex, v);
    }
}

/** scaleOrLength is ignored (no conversion is performed). */
@Override
public void setObject(int parameterIndex, Object v, int targetSqlType, int scaleOrLength) throws SQLException {
    setObject(parameterIndex, v, targetSqlType);
}

/** scaleOrLength is ignored; delegates to the SQLType overload. */
@Override
public void setObject(int parameterIndex, Object v, SQLType targetSqlType, int scaleOrLength) throws SQLException {
    setObject(parameterIndex, v, targetSqlType);
}
// Unsupported
// --- Unsupported parameter types: binary, REF, ROWID, and URL values cannot be
// bound; every setter throws. ---

@Override
public void setBytes(int parameterIndex, byte[] v) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setBytes");
}

@Override
public void setRef(int parameterIndex, Ref x) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setRef");
}

@Override
public void setRowId(int parameterIndex, RowId x) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setRowId");
}

@Override
public void setURL(int parameterIndex, URL v) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setURL");
}
// Unsupported - streams
// --- Unsupported stream-based parameter setters: every variant throws. ---

@Override
public void setAsciiStream(int parameterIndex, InputStream v) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setAsciiStream");
}

@Override
public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setAsciiStream");
}

@Override
public void setAsciiStream(int parameterIndex, InputStream v, long length) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setAsciiStream");
}

@Override
public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setBinaryStream");
}

@Override
public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setBinaryStream");
}

@Override
public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setBinaryStream");
}

@Override
public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setCharacterStream");
}

@Override
public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setCharacterStream");
}

@Override
public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setCharacterStream");
}

@Override
public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setNCharacterStream");
}

@Override
public void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setNCharacterStream");
}

@Override
public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setUnicodeStream");
}
// Unsupported - LOB, Array, SQLXML
@Override
public void setArray(int parameterIndex, Array x) throws SQLException {
throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setArray");
}
@Override
public void setBlob(int parameterIndex, Blob x) throws SQLException {
throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setBlob");
}
@Override
public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException {
throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setBlob");
}
@Override
public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException {
throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setBlob");
}
@Override
public void setClob(int parameterIndex, Clob x) throws SQLException {
throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setClob");
}
@Override
public void setClob(int parameterIndex, Reader reader) throws SQLException {
    // Not supported: CLOB parameters (reader overload)
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setClob");
}
@Override
public void setClob(int parameterIndex, Reader reader, long length) throws SQLException {
    // Not supported: CLOB parameters (length-bounded reader overload)
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setClob");
}
@Override
public void setNClob(int parameterIndex, NClob value) throws SQLException {
    // Not supported: NCLOB parameters
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setNClob");
}
@Override
public void setNClob(int parameterIndex, Reader reader) throws SQLException {
    // Not supported: NCLOB parameters (reader overload)
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setNClob");
}
@Override
public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException {
    // Not supported: NCLOB parameters (length-bounded reader overload)
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setNClob");
}
@Override
public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
    // Not supported: SQLXML parameters
    throw getErrorReporter().errorMethodNotSupported(PreparedStatement.class, "setSQLXML");
}
}
| 6,055 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBColumn.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.util.Objects;
/**
 * Describes one column of a result set: its name, its ADM datatype, and
 * whether individual row values may be absent (NULL/MISSING).
 */
public class ADBColumn {

    private final String name;

    private final ADBDatatype type;

    private final boolean optional;

    /**
     * Creates a column descriptor.
     *
     * @param name column name (must not be null)
     * @param type column datatype (must not be null)
     * @param optional whether the column was declared optional; columns typed
     *            NULL, MISSING, or ANY are always treated as optional
     */
    public ADBColumn(String name, ADBDatatype type, boolean optional) {
        this.name = Objects.requireNonNull(name);
        this.type = Objects.requireNonNull(type);
        boolean alwaysOptional = type.isNullOrMissing() || type == ADBDatatype.ANY;
        this.optional = optional || alwaysOptional;
    }

    /** Returns the column name. */
    public String getName() {
        return name;
    }

    /** Returns the column datatype. */
    public ADBDatatype getType() {
        return type;
    }

    /** Returns whether values of this column may be NULL or MISSING. */
    public boolean isOptional() {
        return optional;
    }

    @Override
    public String toString() {
        // e.g. "price:double?" for an optional double column
        StringBuilder sb = new StringBuilder();
        sb.append(name).append(':').append(type.getTypeName());
        if (optional) {
            sb.append('?');
        }
        return sb.toString();
    }
}
| 6,056 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBDatatype.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.sql.JDBCType;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Datatypes of the Asterix data model (ADM). Each constant carries the
 * wire-level type tag, the ADM type name, the JDBC type it is reported as,
 * and the Java class used to surface values of that type.
 */
public enum ADBDatatype {
    TINYINT(1, "int8", JDBCType.TINYINT, Byte.class),
    SMALLINT(2, "int16", JDBCType.SMALLINT, Short.class),
    INTEGER(3, "int32", JDBCType.INTEGER, Integer.class),
    BIGINT(4, "int64", JDBCType.BIGINT, Long.class),
    //UINT8(5, null, JDBCType.OTHER),
    //UINT16(6, null, JDBCType.OTHER),
    //UINT32(7, null, JDBCType.OTHER),
    //UINT64(8, null, JDBCType.OTHER),
    //BINARY(9, "binary", JDBCType.VARBINARY, byte[].class),
    //BITARRAY(10, null, JDBCType.VARBINARY),
    FLOAT(11, "float", JDBCType.REAL, Float.class),
    DOUBLE(12, "double", JDBCType.DOUBLE, Double.class),
    STRING(13, "string", JDBCType.VARCHAR, String.class),
    MISSING(14, "missing", JDBCType.OTHER, Void.class), // don't report as JDBCType.NULL
    BOOLEAN(15, "boolean", JDBCType.BOOLEAN, Boolean.class),
    DATETIME(16, "datetime", JDBCType.TIMESTAMP, java.sql.Timestamp.class),
    DATE(17, "date", JDBCType.DATE, java.sql.Date.class),
    TIME(18, "time", JDBCType.TIME, java.sql.Time.class),
    DURATION(19, "duration", JDBCType.OTHER, String.class),
    //POINT(20, "point", JDBCType.OTHER, Object.class),
    //POINT3D(21, "point3d", JDBCType.OTHER, Object.class),
    ARRAY(22, "array", JDBCType.OTHER, List.class),
    MULTISET(23, "multiset", JDBCType.OTHER, List.class),
    OBJECT(24, "object", JDBCType.OTHER, Map.class),
    //SPARSOBJECT(25, null, null, JDBCType.OTHER),
    //UNION(26, null, JDBCType.OTHER),
    //ENUM(27, null, JDBCType.OTHER),
    //TYPE(28, null, JDBCType.OTHER),
    ANY(29, "any", JDBCType.OTHER, String.class),
    //LINE(30, "line", JDBCType.OTHER, Object.class),
    //POLYGON(31, "polygon", JDBCType.OTHER, Object.class),
    //CIRCLE(32, "circle", JDBCType.OTHER, Object.class),
    //RECTANGLE(33, "rectangle", JDBCType.OTHER, Object.class),
    //INTERVAL(34, "interval", JDBCType.OTHER, Object.class),
    //SYSTEM_NULL(35, null, null, JDBCType.OTHER),
    YEARMONTHDURATION(36, "year-month-duration", JDBCType.OTHER, java.time.Period.class),
    DAYTIMEDURATION(37, "day-time-duration", JDBCType.OTHER, java.time.Duration.class),
    UUID(38, "uuid", JDBCType.OTHER, java.util.UUID.class),
    //SHORTWITHOUTTYPEINFO(40, null, null, JDBCType.OTHER),
    NULL(41, "null", JDBCType.NULL, Void.class);
    //GEOMETRY(42, "geometry", JDBCType.OTHER, Object.class)

    // Lookup tables populated once by the static initializer below.
    private static final ADBDatatype[] BY_TYPE_TAG;
    private static final Map<String, ADBDatatype> BY_TYPE_NAME;

    private final byte typeTag; // wire-level tag; validated non-negative in findMaxTypeTag()
    private final String typeName;
    private final JDBCType jdbcType;
    private final Class<?> javaClass;

    ADBDatatype(int typeTag, String typeName, JDBCType jdbcType, Class<?> javaClass) {
        this.typeTag = (byte) typeTag;
        this.typeName = Objects.requireNonNull(typeName);
        this.jdbcType = Objects.requireNonNull(jdbcType);
        this.javaClass = Objects.requireNonNull(javaClass);
    }

    /** Returns the wire-level type tag. */
    public byte getTypeTag() {
        return typeTag;
    }

    /** Returns the ADM type name (e.g. "int64"). */
    public String getTypeName() {
        return typeName;
    }

    /** Returns the JDBC type this ADM type is reported as. */
    public JDBCType getJdbcType() {
        return jdbcType;
    }

    /** Returns the Java class used to surface values of this type. */
    public Class<?> getJavaClass() {
        return javaClass;
    }

    @Override
    public String toString() {
        return getTypeName();
    }

    /** Returns whether this is a derived (container) type: OBJECT, ARRAY, or MULTISET. */
    public boolean isDerived() {
        return this == OBJECT || isList();
    }

    /** Returns whether this is a list type: ARRAY or MULTISET. */
    public boolean isList() {
        return this == ARRAY || this == MULTISET;
    }

    /** Returns whether this is one of the two absent-value types: NULL or MISSING. */
    public boolean isNullOrMissing() {
        return this == NULL || this == MISSING;
    }

    static {
        // Build the tag-indexed array and the name map from all declared constants.
        ADBDatatype[] allTypes = ADBDatatype.values();
        ADBDatatype[] byTypeTag = new ADBDatatype[findMaxTypeTag(allTypes) + 1];
        Map<String, ADBDatatype> byTypeName = new HashMap<>();
        for (ADBDatatype t : allTypes) {
            byTypeTag[t.typeTag] = t;
            byTypeName.put(t.typeName, t);
        }
        BY_TYPE_TAG = byTypeTag;
        BY_TYPE_NAME = byTypeName;
    }

    /** Returns the type with the given wire-level tag, or null if unknown. */
    public static ADBDatatype findByTypeTag(byte typeTag) {
        return typeTag >= 0 && typeTag < BY_TYPE_TAG.length ? BY_TYPE_TAG[typeTag] : null;
    }

    /** Returns the type with the given ADM type name, or null if unknown. */
    public static ADBDatatype findByTypeName(String typeName) {
        return BY_TYPE_NAME.get(typeName);
    }

    // Largest tag among the declared constants; also rejects negative tags.
    private static int findMaxTypeTag(ADBDatatype[] allTypes) {
        int maxTypeTag = 0;
        for (ADBDatatype type : allTypes) {
            if (type.typeTag < 0) {
                throw new IllegalStateException(type.getTypeName());
            }
            maxTypeTag = Math.max(type.typeTag, maxTypeTag);
        }
        return maxTypeTag;
    }

    /**
     * Returns the record-style name for a derived type ("Record",
     * "OrderedList", "UnorderedList"); throws for any other type.
     */
    public static String getDerivedRecordName(ADBDatatype type) {
        switch (type) {
            case OBJECT:
                return "Record";
            case ARRAY:
                return "OrderedList";
            case MULTISET:
                return "UnorderedList";
            default:
                throw new IllegalArgumentException(String.valueOf(type));
        }
    }
}
| 6,057 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBProductVersion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
/**
 * Holds a database product name and version, with defaults applied when the
 * name or version string is absent.
 */
public class ADBProductVersion {

    public static final String ASTERIXDB = "Apache AsterixDB";

    private final String productName;
    private final String productVersion;
    private final int majorVersion;
    private final int minorVersion;

    /**
     * Creates a product version descriptor.
     *
     * @param productName product name, or null to default to {@link #ASTERIXDB}
     * @param productVersion full version string, or null to default to "major.minor"
     * @param majorVersion major version number
     * @param minorVersion minor version number
     */
    public ADBProductVersion(String productName, String productVersion, int majorVersion, int minorVersion) {
        if (productName == null) {
            productName = ASTERIXDB;
        }
        if (productVersion == null) {
            productVersion = majorVersion + "." + minorVersion;
        }
        this.productName = productName;
        this.productVersion = productVersion;
        this.majorVersion = majorVersion;
        this.minorVersion = minorVersion;
    }

    /** Returns the product name (never null). */
    public String getProductName() {
        return productName;
    }

    /** Returns the full version string (never null). */
    public String getProductVersion() {
        return productVersion;
    }

    /** Returns the major version number. */
    public int getMajorVersion() {
        return majorVersion;
    }

    /** Returns the minor version number. */
    public int getMinorVersion() {
        return minorVersion;
    }

    /**
     * Returns whether this version is greater than or equal to the given one,
     * comparing major first, then minor.
     */
    public boolean isAtLeast(ADBProductVersion otherVersion) {
        if (majorVersion != otherVersion.majorVersion) {
            return majorVersion > otherVersion.majorVersion;
        }
        return minorVersion >= otherVersion.minorVersion;
    }

    @Override
    public String toString() {
        return productName + '/' + productVersion;
    }
}
| 6,058 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBResultSet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLType;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectReader;
public class ADBResultSet extends ADBWrapperSupport implements java.sql.ResultSet {
static final int RESULT_SET_HOLDABILITY = HOLD_CURSORS_OVER_COMMIT;
static final int ST_BEFORE_FIRST = 0;
static final int ST_NEXT = 1;
static final int ST_AFTER_LAST = 2;
// lifecycle
final AtomicBoolean closed = new AtomicBoolean(false);
// metadata
final ADBResultSetMetaData metadata;
// navigation
final JsonParser rowParser;
final boolean rowParserOwnsResources;
final long maxRows;
int state;
long rowNumber;
ADBRowStore rowStore;
ObjectReader complexColumnReader;
int columnIndexOfLatestGet;
// Lifecycle
/**
 * Creates a forward-only result set over a streamed JSON result array.
 *
 * @param metadata column metadata; also links back to the owning statement
 * @param rowParser JSON parser positioned at (or just before) the result array
 * @param rowParserOwnsResources whether the parser owns its underlying stream
 *            (NOTE(review): stored but not consulted in this class; presumably
 *            used by subclasses — confirm)
 * @param maxRows maximum number of rows to return; values <= 0 mean no limit
 */
public ADBResultSet(ADBResultSetMetaData metadata, JsonParser rowParser, boolean rowParserOwnsResources,
        long maxRows) {
    this.metadata = Objects.requireNonNull(metadata);
    this.rowParser = Objects.requireNonNull(rowParser);
    this.rowParserOwnsResources = rowParserOwnsResources;
    this.maxRows = maxRows;
    this.state = ST_BEFORE_FIRST;
}
@Override
public void close() throws SQLException {
    // notifyStatement=true: also deregister from the owning statement
    closeImpl(true);
}
/**
 * Closes this result set and its JSON parser. Idempotent: subsequent calls
 * are no-ops.
 *
 * @param notifyStatement when true, deregisters this result set from the
 *            owning statement (close() passes true)
 */
void closeImpl(boolean notifyStatement) throws SQLException {
    boolean wasClosed = closed.getAndSet(true);
    if (wasClosed) {
        return; // already closed - nothing to do
    }
    try {
        rowParser.close();
    } catch (IOException e) {
        throw getErrorReporter().errorClosingResource(e);
    } finally {
        // deregister even if closing the parser failed
        if (notifyStatement) {
            metadata.statement.deregisterResultSet(this);
        }
    }
}
/** Returns whether this result set has been closed. */
@Override
public boolean isClosed() {
    return closed.get();
}
// Guard used by every public method: fail fast if the result set is closed.
private void checkClosed() throws SQLException {
    if (isClosed()) {
        throw getErrorReporter().errorObjectClosed(ResultSet.class);
    }
}
// Metadata
/** Returns the column metadata for this result set. */
@Override
public ResultSetMetaData getMetaData() throws SQLException {
    checkClosed();
    return metadata;
}
// Navigation
/**
 * Advances the cursor to the next row. On first use, consumes the
 * START_ARRAY token that opens the result array; each subsequent call reads
 * one row object into the row store, stopping at END_ARRAY or once maxRows
 * rows have been returned.
 *
 * @return true if positioned on a row, false when past the last row
 */
@Override
public boolean next() throws SQLException {
    checkClosed();
    try {
        switch (state) {
            case ST_BEFORE_FIRST:
                JsonToken token = rowParser.hasCurrentToken() ? rowParser.currentToken() : rowParser.nextToken();
                if (token != JsonToken.START_ARRAY) {
                    throw getErrorReporter().errorInProtocol(String.valueOf(token));
                }
                initRowStore();
                state = ST_NEXT;
                // fall thru to ST_NEXT
            case ST_NEXT:
                token = rowParser.nextToken();
                switch (token) {
                    case START_OBJECT:
                        if (maxRows > 0 && rowNumber == maxRows) {
                            // row limit reached: stop without reading this row
                            state = ST_AFTER_LAST;
                            return false;
                        } else {
                            readRow();
                            rowNumber++;
                            return true;
                        }
                    case END_ARRAY:
                        // end of the result array
                        state = ST_AFTER_LAST;
                        return false;
                    default:
                        throw getErrorReporter().errorInProtocol(String.valueOf(token));
                }
            case ST_AFTER_LAST:
                return false;
            default:
                throw new IllegalStateException(String.valueOf(state));
        }
    } catch (JsonProcessingException e) {
        throw getErrorReporter().errorInProtocol(e);
    } catch (IOException e) {
        throw getErrorReporter().errorInConnection(e);
    }
}
// Allocates the per-row value store sized to the number of columns.
private void initRowStore() {
    rowStore = createRowStore(metadata.getColumnCount());
}
// Extension point: subclasses may supply a specialized row store.
protected ADBRowStore createRowStore(int columnCount) {
    return new ADBRowStore(this, columnCount);
}
/**
 * Parses the current JSON row object into {@link #rowStore}. Fields matching
 * a known column are type-checked against the column metadata and stored;
 * fields with unknown names are skipped (including any nested content).
 */
private void readRow() throws SQLException {
    rowStore.reset();
    columnIndexOfLatestGet = -1; // reset wasNull() tracking for the new row
    if (rowParser.currentToken() != JsonToken.START_OBJECT) {
        throw new IllegalStateException();
    }
    try {
        while (rowParser.nextToken() == JsonToken.FIELD_NAME) {
            String fieldName = rowParser.getCurrentName();
            int columnIndex = metadata.findColumnIndexByName(fieldName);
            boolean isKnownColumn = columnIndex >= 0;
            ADBColumn column = isKnownColumn ? metadata.getColumnByIndex(columnIndex) : null;
            switch (rowParser.nextToken()) {
                case VALUE_NULL:
                    if (isKnownColumn) {
                        typeCheck(column, ADBDatatype.NULL);
                        rowStore.putNullColumn(columnIndex);
                    }
                    break;
                case VALUE_TRUE:
                    if (isKnownColumn) {
                        typeCheck(column, ADBDatatype.BOOLEAN);
                        rowStore.putBooleanColumn(columnIndex, true);
                    }
                    break;
                case VALUE_FALSE:
                    if (isKnownColumn) {
                        typeCheck(column, ADBDatatype.BOOLEAN);
                        rowStore.putBooleanColumn(columnIndex, false);
                    }
                    break;
                case VALUE_NUMBER_INT:
                    if (isKnownColumn) {
                        typeCheck(column, ADBDatatype.BIGINT);
                        long v = rowParser.getLongValue();
                        rowStore.putInt64Column(columnIndex, v);
                    }
                    break;
                case VALUE_STRING:
                    // Strings may encode any primitive type (see typeCheck);
                    // the raw characters are stored and decoded by the getters.
                    if (isKnownColumn) {
                        typeCheck(column, ADBDatatype.STRING);
                        char[] textChars = rowParser.getTextCharacters();
                        int textOffset = rowParser.getTextOffset();
                        int textLength = rowParser.getTextLength();
                        rowStore.putColumn(columnIndex, textChars, textOffset, textLength);
                    }
                    break;
                case START_OBJECT:
                    // Nested objects are materialized eagerly via Jackson.
                    if (isKnownColumn) {
                        typeCheck(column, ADBDatatype.OBJECT);
                        Map<?, ?> valueMap = getComplexColumnReader().readValue(rowParser, Map.class);
                        rowStore.putRecordColumn(columnIndex, valueMap);
                    } else {
                        rowParser.skipChildren();
                    }
                    break;
                case START_ARRAY:
                    if (isKnownColumn) {
                        typeCheck(column, ADBDatatype.ARRAY);
                        List<?> valueList = getComplexColumnReader().readValue(rowParser, List.class);
                        rowStore.putArrayColumn(columnIndex, valueList);
                    } else {
                        rowParser.skipChildren();
                    }
                    break;
                default:
                    throw getErrorReporter().errorInProtocol(String.valueOf(rowParser.currentToken()));
            }
        }
    } catch (JsonProcessingException e) {
        throw getErrorReporter().errorInProtocol(e);
    } catch (IOException e) {
        throw getErrorReporter().errorInConnection(e);
    }
}
/**
 * Verifies that the type implied by the parsed JSON token is compatible with
 * the declared type of the given column.
 *
 * @param column the column the parsed value belongs to
 * @param parsedType the type implied by the JSON token (NULL, BOOLEAN,
 *            BIGINT, STRING, OBJECT, or ARRAY)
 * @throws SQLException if the value is incompatible with the column type
 */
private void typeCheck(ADBColumn column, ADBDatatype parsedType) throws SQLException {
    ADBDatatype columnType = column.getType();
    boolean typeMatch;
    switch (parsedType) {
        case NULL:
            typeMatch = column.isOptional();
            break;
        case STRING:
            // special handling for parsed 'string' because it can contain any primitive type.
            // we only need to check that the expected type is not derived (i.e primitive/null/missing/any)
            typeMatch = !columnType.isDerived();
            break;
        case ARRAY:
            typeMatch = columnType == ADBDatatype.ANY || columnType.isList();
            break;
        case BOOLEAN:
        case BIGINT:
        case OBJECT:
            typeMatch = columnType == ADBDatatype.ANY || columnType == parsedType;
            break;
        default:
            // unexpected
            throw getErrorReporter().errorInProtocol(parsedType.toString());
    }
    if (!typeMatch) {
        throw getErrorReporter().errorUnexpectedColumnValue(parsedType, column.getName());
    }
}
@Override
public void beforeFirst() throws SQLException {
checkClosed();
throw getErrorReporter().errorIncompatibleMode("FORWARD_ONLY");
}
@Override
public void afterLast() throws SQLException {
checkClosed();
throw getErrorReporter().errorIncompatibleMode("FORWARD_ONLY");
}
@Override
public boolean first() throws SQLException {
checkClosed();
throw getErrorReporter().errorIncompatibleMode("FORWARD_ONLY");
}
@Override
public boolean last() throws SQLException {
checkClosed();
throw getErrorReporter().errorIncompatibleMode("FORWARD_ONLY");
}
@Override
public boolean previous() throws SQLException {
checkClosed();
throw getErrorReporter().errorIncompatibleMode("FORWARD_ONLY");
}
@Override
public boolean relative(int rows) throws SQLException {
checkClosed();
throw getErrorReporter().errorIncompatibleMode("FORWARD_ONLY");
}
@Override
public boolean absolute(int row) throws SQLException {
checkClosed();
throw getErrorReporter().errorIncompatibleMode("FORWARD_ONLY");
}
@Override
public boolean isBeforeFirst() throws SQLException {
checkClosed();
return state == ST_BEFORE_FIRST;
}
@Override
public boolean isAfterLast() throws SQLException {
checkClosed();
return state == ST_AFTER_LAST;
}
@Override
public boolean isFirst() throws SQLException {
checkClosed();
return state == ST_NEXT && rowNumber == 1;
}
@Override
public boolean isLast() throws SQLException {
throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "isLast");
}
@Override
public int getRow() throws SQLException {
checkClosed();
return state == ST_NEXT ? (int) rowNumber : 0;
}
private void checkCursorPosition() throws SQLException {
if (state != ST_NEXT) {
throw getErrorReporter().errorNoCurrentRow();
}
}
/**
 * Lazily creates (and caches) the Jackson reader used to materialize nested
 * OBJECT/ARRAY column values; a throwaway single-column row store supplies
 * the reader configuration.
 */
private ObjectReader getComplexColumnReader() {
    if (complexColumnReader == null) {
        ADBDriverContext ctx = metadata.statement.connection.protocol.getDriverContext();
        ADBRowStore tmpStore = createRowStore(1);
        complexColumnReader = tmpStore.createComplexColumnObjectReader(ctx.getAdmFormatObjectReader());
    }
    return complexColumnReader;
}
// Column accessors
/**
 * Maps a column label to its 1-based JDBC column number.
 *
 * @throws SQLException if the result set is closed or no such column exists
 */
@Override
public int findColumn(String columnLabel) throws SQLException {
    checkClosed();
    // Delegate to the shared lookup used by the typed getters instead of
    // duplicating the name-resolution and error-reporting logic here.
    return fetchColumnIndex(columnLabel) + 1;
}
// Column accessors: basic types
// Converts a 1-based JDBC column number to the 0-based internal index,
// validating it against the column count.
private int fetchColumnIndex(int columnNumber) throws SQLException {
    if (columnNumber < 1 || columnNumber > metadata.getColumnCount()) {
        throw getErrorReporter().errorColumnNotFound(String.valueOf(columnNumber));
    }
    return columnNumber - 1;
}
// Resolves a column label to the 0-based internal index, failing if unknown.
private int fetchColumnIndex(String columnLabel) throws SQLException {
    int columnIndex = metadata.findColumnIndexByName(columnLabel);
    if (columnIndex < 0) {
        throw getErrorReporter().errorColumnNotFound(columnLabel);
    }
    return columnIndex;
}
/**
 * Reports whether the column read by the most recent getter held NULL or
 * MISSING. Returns false if no getter has been called on the current row.
 */
@Override
public boolean wasNull() throws SQLException {
    checkClosed();
    checkCursorPosition();
    if (columnIndexOfLatestGet < 0) {
        // no column has been read on this row yet
        return false;
    }
    ADBDatatype columnValueType = rowStore.getColumnType(columnIndexOfLatestGet);
    return columnValueType == ADBDatatype.NULL || columnValueType == ADBDatatype.MISSING;
}
@Override
public boolean getBoolean(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getBooleanImpl(fetchColumnIndex(columnNumber));
}
@Override
public boolean getBoolean(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getBooleanImpl(fetchColumnIndex(columnLabel));
}
private boolean getBooleanImpl(int columnIndex) throws SQLException {
boolean v = rowStore.getBoolean(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public byte getByte(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getByteImpl(fetchColumnIndex(columnNumber));
}
@Override
public byte getByte(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getByteImpl(fetchColumnIndex(columnLabel));
}
private byte getByteImpl(int columnIndex) throws SQLException {
byte v = rowStore.getByte(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public short getShort(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getShortImpl(fetchColumnIndex(columnNumber));
}
@Override
public short getShort(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getShortImpl(fetchColumnIndex(columnLabel));
}
private short getShortImpl(int columnIndex) throws SQLException {
short v = rowStore.getShort(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public int getInt(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getIntImpl(fetchColumnIndex(columnNumber));
}
@Override
public int getInt(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getIntImpl(fetchColumnIndex(columnLabel));
}
private int getIntImpl(int columnIndex) throws SQLException {
int v = rowStore.getInt(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public long getLong(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getLongImpl(fetchColumnIndex(columnNumber));
}
@Override
public long getLong(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getLongImpl(fetchColumnIndex(columnLabel));
}
private long getLongImpl(int columnIndex) throws SQLException {
long v = rowStore.getLong(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public float getFloat(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getFloatImpl(fetchColumnIndex(columnNumber));
}
@Override
public float getFloat(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getFloatImpl(fetchColumnIndex(columnLabel));
}
private float getFloatImpl(int columnIndex) throws SQLException {
float v = rowStore.getFloat(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public double getDouble(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getDoubleImpl(fetchColumnIndex(columnNumber));
}
@Override
public double getDouble(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getDoubleImpl(fetchColumnIndex(columnLabel));
}
private double getDoubleImpl(int columnIndex) throws SQLException {
double v = rowStore.getDouble(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public BigDecimal getBigDecimal(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getBigDecimalImpl(fetchColumnIndex(columnNumber), false, -1);
}
@Override
public BigDecimal getBigDecimal(int columnNumber, int scale) throws SQLException {
checkClosed();
checkCursorPosition();
return getBigDecimalImpl(fetchColumnIndex(columnNumber), true, scale);
}
@Override
public BigDecimal getBigDecimal(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getBigDecimalImpl(fetchColumnIndex(columnLabel), false, -1);
}
@Override
public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException {
checkClosed();
checkCursorPosition();
return getBigDecimalImpl(fetchColumnIndex(columnLabel), true, scale);
}
private BigDecimal getBigDecimalImpl(int columnIndex, boolean setScale, int scale) throws SQLException {
BigDecimal v = rowStore.getBigDecimal(columnIndex, setScale, scale);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public Date getDate(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getDateImpl(fetchColumnIndex(columnNumber), null);
}
@Override
public Date getDate(int columnNumber, Calendar cal) throws SQLException {
checkClosed();
checkCursorPosition();
return getDateImpl(fetchColumnIndex(columnNumber), cal);
}
@Override
public Date getDate(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getDateImpl(fetchColumnIndex(columnLabel), null);
}
@Override
public Date getDate(String columnLabel, Calendar cal) throws SQLException {
checkClosed();
checkCursorPosition();
return getDateImpl(fetchColumnIndex(columnLabel), cal);
}
private Date getDateImpl(int columnIndex, Calendar cal) throws SQLException {
Date v = rowStore.getDate(columnIndex, cal);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public Time getTime(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getTimeImpl(fetchColumnIndex(columnNumber), null);
}
@Override
public Time getTime(int columnNumber, Calendar cal) throws SQLException {
checkClosed();
checkCursorPosition();
return getTimeImpl(fetchColumnIndex(columnNumber), cal);
}
@Override
public Time getTime(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getTimeImpl(fetchColumnIndex(columnLabel), null);
}
@Override
public Time getTime(String columnLabel, Calendar cal) throws SQLException {
checkClosed();
checkCursorPosition();
return getTimeImpl(fetchColumnIndex(columnLabel), cal);
}
private Time getTimeImpl(int columnIndex, Calendar cal) throws SQLException {
Time v = rowStore.getTime(columnIndex, cal);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public Timestamp getTimestamp(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getTimestampImpl(fetchColumnIndex(columnNumber), null);
}
@Override
public Timestamp getTimestamp(int columnNumber, Calendar cal) throws SQLException {
checkClosed();
checkCursorPosition();
return getTimestampImpl(fetchColumnIndex(columnNumber), cal);
}
@Override
public Timestamp getTimestamp(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getTimestampImpl(fetchColumnIndex(columnLabel), null);
}
@Override
public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException {
checkClosed();
checkCursorPosition();
return getTimestampImpl(fetchColumnIndex(columnLabel), cal);
}
private Timestamp getTimestampImpl(int columnIndex, Calendar cal) throws SQLException {
Timestamp v = rowStore.getTimestamp(columnIndex, cal);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public String getString(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getStringImpl(fetchColumnIndex(columnNumber));
}
@Override
public String getString(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getStringImpl(fetchColumnIndex(columnLabel));
}
@Override
public String getNString(int columnNumber) throws SQLException {
return getString(columnNumber);
}
@Override
public String getNString(String columnLabel) throws SQLException {
return getString(columnLabel);
}
private String getStringImpl(int columnIndex) throws SQLException {
String v = rowStore.getString(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public byte[] getBytes(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getBytesImpl(fetchColumnIndex(columnNumber));
}
@Override
public byte[] getBytes(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getBytesImpl(fetchColumnIndex(columnLabel));
}
private byte[] getBytesImpl(int columnIndex) throws SQLException {
byte[] v = rowStore.getBinary(columnIndex);
columnIndexOfLatestGet = columnIndex;
return v;
}
// Column accessor: Generic (getObject)
@Override
public Object getObject(int columnNumber) throws SQLException {
checkClosed();
checkCursorPosition();
return getObjectImpl(fetchColumnIndex(columnNumber));
}
@Override
public Object getObject(String columnLabel) throws SQLException {
checkClosed();
checkCursorPosition();
return getObjectImpl(fetchColumnIndex(columnLabel));
}
@Override
public <T> T getObject(int columnNumber, Class<T> type) throws SQLException {
checkClosed();
checkCursorPosition();
if (type == null) {
throw getErrorReporter().errorParameterValueNotSupported("type");
}
return getObjectImpl(fetchColumnIndex(columnNumber), type);
}
@Override
public <T> T getObject(String columnLabel, Class<T> type) throws SQLException {
checkClosed();
checkCursorPosition();
if (type == null) {
throw getErrorReporter().errorParameterValueNotSupported("type");
}
return getObjectImpl(fetchColumnIndex(columnLabel), type);
}
// Untyped getObject: materialize using the Java class declared for the
// column's metadata type.
private Object getObjectImpl(int columnIndex) throws SQLException {
    ADBColumn column = metadata.getColumnByIndex(columnIndex);
    return getObjectImpl(columnIndex, column.getType().getJavaClass());
}
private <T> T getObjectImpl(int columnIndex, Class<T> type) throws SQLException {
T v = rowStore.getObject(columnIndex, type);
columnIndexOfLatestGet = columnIndex;
return v;
}
@Override
public Object getObject(int columnIndex, Map<String, Class<?>> map) throws SQLException {
throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getObject");
}
@Override
public Object getObject(String columnLabel, Map<String, Class<?>> map) throws SQLException {
throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getObject");
}
    // Column accessors: streams

    @Override
    public InputStream getBinaryStream(int columnIndex) throws SQLException {
        checkClosed();
        checkCursorPosition();
        return getBinaryStreamImpl(fetchColumnIndex(columnIndex));
    }

    @Override
    public InputStream getBinaryStream(String columnLabel) throws SQLException {
        checkClosed();
        checkCursorPosition();
        return getBinaryStreamImpl(fetchColumnIndex(columnLabel));
    }

    // Fetches the raw byte stream from the row store and records the accessed
    // column index (same bookkeeping as the other accessors).
    private InputStream getBinaryStreamImpl(int columnIndex) throws SQLException {
        InputStream v = rowStore.getInputStream(columnIndex);
        columnIndexOfLatestGet = columnIndex;
        return v;
    }

    @Override
    public Reader getCharacterStream(int columnNumber) throws SQLException {
        checkClosed();
        checkCursorPosition();
        return getCharacterStreamImpl(fetchColumnIndex(columnNumber));
    }

    @Override
    public Reader getCharacterStream(String columnLabel) throws SQLException {
        checkClosed();
        checkCursorPosition();
        return getCharacterStreamImpl(fetchColumnIndex(columnLabel));
    }

    private Reader getCharacterStreamImpl(int columnIndex) throws SQLException {
        Reader v = rowStore.getCharacterStream(columnIndex);
        columnIndexOfLatestGet = columnIndex;
        return v;
    }

    // N-variants delegate to the plain character stream accessors: the driver
    // does not distinguish NCHAR data from CHAR data.
    @Override
    public Reader getNCharacterStream(int columnIndex) throws SQLException {
        return getCharacterStream(columnIndex);
    }

    @Override
    public Reader getNCharacterStream(String columnLabel) throws SQLException {
        return getCharacterStream(columnLabel);
    }
    @Override
    public InputStream getAsciiStream(int columnNumber) throws SQLException {
        checkClosed();
        checkCursorPosition();
        return getAsciiStreamImpl(fetchColumnIndex(columnNumber));
    }

    @Override
    public InputStream getAsciiStream(String columnLabel) throws SQLException {
        checkClosed();
        checkCursorPosition();
        return getAsciiStreamImpl(fetchColumnIndex(columnLabel));
    }

    // Materializes the column as a String first, then exposes its US-ASCII
    // bytes as a stream; returns null for SQL NULL (getStringImpl is assumed
    // to record the accessed column index — TODO confirm).
    private InputStream getAsciiStreamImpl(int columnIndex) throws SQLException {
        String value = getStringImpl(columnIndex);
        return value != null ? new ByteArrayInputStream(value.getBytes(StandardCharsets.US_ASCII)) : null;
    }

    @Override
    public InputStream getUnicodeStream(int columnNumber) throws SQLException {
        checkClosed();
        checkCursorPosition();
        return getUnicodeStreamImpl(fetchColumnIndex(columnNumber));
    }

    @Override
    public InputStream getUnicodeStream(String columnLabel) throws SQLException {
        checkClosed();
        checkCursorPosition();
        return getUnicodeStreamImpl(fetchColumnIndex(columnLabel));
    }

    // NOTE(review): StandardCharsets.UTF_16 emits a byte-order mark before the
    // UTF-16BE payload; confirm callers of this deprecated JDBC accessor can
    // tolerate the BOM (UTF_16BE would omit it).
    private InputStream getUnicodeStreamImpl(int columnIndex) throws SQLException {
        String value = getStringImpl(columnIndex);
        return value != null ? new ByteArrayInputStream(value.getBytes(StandardCharsets.UTF_16)) : null;
    }
    // Column accessors: unsupported
    // The SQL types below (REF, ROWID, DATALINK/URL) have no representation in
    // this driver, so all of their accessors report "method not supported".

    @Override
    public Ref getRef(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getRef");
    }

    @Override
    public Ref getRef(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getRef");
    }

    @Override
    public RowId getRowId(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getRowId");
    }

    @Override
    public RowId getRowId(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getRowId");
    }

    @Override
    public URL getURL(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getURL");
    }

    @Override
    public URL getURL(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getURL");
    }

    // Column accessors: unsupported - LOB, Array, SQLXML

    @Override
    public Array getArray(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getArray");
    }

    @Override
    public Array getArray(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getArray");
    }

    @Override
    public Blob getBlob(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getBlob");
    }

    @Override
    public Blob getBlob(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getBlob");
    }

    @Override
    public Clob getClob(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getClob");
    }

    @Override
    public Clob getClob(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getClob");
    }

    @Override
    public NClob getNClob(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getNClob");
    }

    @Override
    public NClob getNClob(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getNClob");
    }

    @Override
    public SQLXML getSQLXML(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getSQLXML");
    }

    @Override
    public SQLXML getSQLXML(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "getSQLXML");
    }
    // Updates (unsupported)
    // Column setters
    // This result set is read-only (CONCUR_READ_ONLY, see getConcurrency()),
    // so every updateXxx method unconditionally reports "method not supported".

    @Override
    public void updateArray(int columnIndex, Array x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateArray");
    }

    @Override
    public void updateArray(String columnLabel, Array x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateArray");
    }

    @Override
    public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateAsciiStream");
    }

    @Override
    public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateAsciiStream");
    }

    @Override
    public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateAsciiStream");
    }

    @Override
    public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateAsciiStream");
    }

    @Override
    public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateAsciiStream");
    }

    @Override
    public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateAsciiStream");
    }

    @Override
    public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBigDecimal");
    }

    @Override
    public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBigDecimal");
    }

    @Override
    public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBinaryStream");
    }

    @Override
    public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBinaryStream");
    }

    @Override
    public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBinaryStream");
    }

    @Override
    public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBinaryStream");
    }

    @Override
    public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBinaryStream");
    }

    @Override
    public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBinaryStream");
    }

    @Override
    public void updateBlob(int columnIndex, Blob x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBlob");
    }

    @Override
    public void updateBlob(String columnLabel, Blob x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBlob");
    }

    @Override
    public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBlob");
    }

    @Override
    public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBlob");
    }

    @Override
    public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBlob");
    }

    @Override
    public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBlob");
    }

    @Override
    public void updateBoolean(int columnIndex, boolean x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBoolean");
    }

    @Override
    public void updateBoolean(String columnLabel, boolean x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBoolean");
    }

    @Override
    public void updateByte(int columnIndex, byte x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateByte");
    }

    @Override
    public void updateByte(String columnLabel, byte x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateByte");
    }

    @Override
    public void updateBytes(int columnIndex, byte[] x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBytes");
    }

    @Override
    public void updateBytes(String columnLabel, byte[] x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateBytes");
    }

    @Override
    public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateCharacterStream");
    }

    @Override
    public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateCharacterStream");
    }

    @Override
    public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateCharacterStream");
    }

    @Override
    public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateCharacterStream");
    }

    @Override
    public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateCharacterStream");
    }

    @Override
    public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateCharacterStream");
    }

    @Override
    public void updateClob(int columnIndex, Clob x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateClob");
    }

    @Override
    public void updateClob(String columnLabel, Clob x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateClob");
    }

    @Override
    public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateClob");
    }

    @Override
    public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateClob");
    }

    @Override
    public void updateClob(int columnIndex, Reader reader) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateClob");
    }

    @Override
    public void updateClob(String columnLabel, Reader reader) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateClob");
    }

    @Override
    public void updateDate(int columnIndex, Date x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateDate");
    }

    @Override
    public void updateDate(String columnLabel, Date x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateDate");
    }

    @Override
    public void updateDouble(int columnIndex, double x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateDouble");
    }

    @Override
    public void updateDouble(String columnLabel, double x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateDouble");
    }

    @Override
    public void updateFloat(int columnIndex, float x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateFloat");
    }

    @Override
    public void updateFloat(String columnLabel, float x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateFloat");
    }

    @Override
    public void updateInt(int columnIndex, int x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateInt");
    }

    @Override
    public void updateInt(String columnLabel, int x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateInt");
    }

    @Override
    public void updateLong(int columnIndex, long x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateLong");
    }

    @Override
    public void updateLong(String columnLabel, long x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateLong");
    }

    @Override
    public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNCharacterStream");
    }

    @Override
    public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNCharacterStream");
    }

    @Override
    public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNCharacterStream");
    }

    @Override
    public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNCharacterStream");
    }

    @Override
    public void updateNClob(int columnIndex, NClob nClob) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNClob");
    }

    @Override
    public void updateNClob(String columnLabel, NClob nClob) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNClob");
    }

    @Override
    public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNClob");
    }

    @Override
    public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNClob");
    }

    @Override
    public void updateNClob(int columnIndex, Reader reader) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNClob");
    }

    @Override
    public void updateNClob(String columnLabel, Reader reader) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNClob");
    }

    @Override
    public void updateNString(int columnIndex, String nString) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNString");
    }

    @Override
    public void updateNString(String columnLabel, String nString) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNString");
    }

    @Override
    public void updateNull(int columnIndex) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNull");
    }

    @Override
    public void updateNull(String columnLabel) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateNull");
    }

    @Override
    public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateObject");
    }

    @Override
    public void updateObject(int columnIndex, Object x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateObject");
    }

    @Override
    public void updateObject(int columnIndex, Object x, SQLType targetSqlType, int scaleOrLength) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateObject");
    }

    @Override
    public void updateObject(String columnLabel, Object x, SQLType targetSqlType, int scaleOrLength)
            throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateObject");
    }

    @Override
    public void updateObject(int columnIndex, Object x, SQLType targetSqlType) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateObject");
    }

    @Override
    public void updateObject(String columnLabel, Object x, SQLType targetSqlType) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateObject");
    }

    @Override
    public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateObject");
    }

    @Override
    public void updateObject(String columnLabel, Object x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateObject");
    }

    @Override
    public void updateRef(int columnIndex, Ref x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateRef");
    }

    @Override
    public void updateRef(String columnLabel, Ref x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateRef");
    }

    @Override
    public void updateRowId(int columnIndex, RowId x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateRowId");
    }

    @Override
    public void updateRowId(String columnLabel, RowId x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateRowId");
    }

    @Override
    public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateSQLXML");
    }

    @Override
    public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateSQLXML");
    }

    @Override
    public void updateShort(int columnIndex, short x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateShort");
    }

    @Override
    public void updateShort(String columnLabel, short x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateShort");
    }

    @Override
    public void updateString(int columnIndex, String x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateString");
    }

    @Override
    public void updateString(String columnLabel, String x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateString");
    }

    @Override
    public void updateTime(int columnIndex, Time x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateTime");
    }

    @Override
    public void updateTime(String columnLabel, Time x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateTime");
    }

    @Override
    public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateTimestamp");
    }

    @Override
    public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateTimestamp");
    }
    // Update navigation and state (unsupported)
    // Row-mutation lifecycle methods are unsupported for the same reason as
    // the updateXxx setters: this result set is read-only and forward-only.

    @Override
    public void insertRow() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "insertRow");
    }

    @Override
    public void updateRow() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "updateRow");
    }

    @Override
    public void deleteRow() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "deleteRow");
    }

    @Override
    public void refreshRow() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "refreshRow");
    }

    @Override
    public void moveToInsertRow() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "moveToInsertRow");
    }

    @Override
    public void moveToCurrentRow() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "moveToCurrentRow");
    }

    @Override
    public boolean rowInserted() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "rowInserted");
    }

    @Override
    public boolean rowUpdated() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "rowUpdated");
    }

    @Override
    public boolean rowDeleted() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "rowDeleted");
    }

    @Override
    public void cancelRowUpdates() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(ResultSet.class, "cancelRowUpdates");
    }
    // Errors and warnings

    // This result set does not accumulate warnings, so getWarnings() always
    // reports none and clearWarnings() is a no-op (both still require an open
    // result set, per JDBC).
    @Override
    public SQLWarning getWarnings() throws SQLException {
        checkClosed();
        return null;
    }

    @Override
    public void clearWarnings() throws SQLException {
        checkClosed();
    }
    // Error reporting is delegated to the shared reporter held by the
    // result-set metadata.
    @Override
    protected ADBErrorReporter getErrorReporter() {
        return metadata.getErrorReporter();
    }
    // Ownership

    // Returns the Statement that produced this result set, resolved through
    // the metadata's owning statement.
    @Override
    public Statement getStatement() throws SQLException {
        checkClosed();
        return metadata.statement.getResultSetStatement(this);
    }
    // Cursor - related
    // This result set is fixed: forward-only, read-only, with a constant
    // holdability and fetch direction. The property getters below reflect
    // that, and the setters only validate their arguments.

    @Override
    public String getCursorName() throws SQLException {
        checkClosed();
        return "";
    }

    @Override
    public int getType() throws SQLException {
        checkClosed();
        return TYPE_FORWARD_ONLY;
    }

    @Override
    public int getConcurrency() throws SQLException {
        checkClosed();
        return ResultSet.CONCUR_READ_ONLY;
    }

    @Override
    public int getHoldability() throws SQLException {
        checkClosed();
        return RESULT_SET_HOLDABILITY;
    }

    @Override
    public int getFetchDirection() throws SQLException {
        checkClosed();
        return FETCH_FORWARD;
    }

    @Override
    public void setFetchDirection(int direction) throws SQLException {
        checkClosed();
        if (direction != ResultSet.FETCH_FORWARD) {
            throw getErrorReporter().errorParameterValueNotSupported("direction");
        }
    }

    @Override
    public int getFetchSize() throws SQLException {
        checkClosed();
        return 1;
    }
@Override
public void setFetchSize(int rows) throws SQLException {
checkClosed();
// ignore value
}
}
| 6,059 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBConnection.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLPermission;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
public class ADBConnection extends ADBWrapperSupport implements Connection {

    // Protocol session used for all communication with the server.
    protected final ADBProtocolBase protocol;
    // JDBC URL this connection was opened with (reported by metadata).
    protected final String url;
    protected final ADBProductVersion databaseVersion;
    // Controls how dataverse names map onto JDBC catalog/schema.
    protected final ADBDriverProperty.CatalogDataverseMode catalogDataverseMode;
    protected final boolean catalogIncludesSchemaless;
    protected final boolean sqlCompatMode;

    // Set exactly once in closeImpl(); guards against double-close.
    private final AtomicBoolean closed;
    // Statements created by this connection, closed when the connection closes.
    private final ConcurrentLinkedQueue<ADBStatement> statements;

    private volatile SQLWarning warning;
    // Lazily created shared statement for metadata queries (double-checked
    // locking in getOrCreateMetaStatement relies on this being volatile).
    private volatile ADBMetaStatement metaStatement;
    // volatile: may be read/written from any thread via get/setCatalog|Schema.
    private volatile String catalog;
    private volatile String schema;
    private final boolean databaseEntitySupported;
    // Lifecycle

    /**
     * Creates a connection over an already-established protocol session.
     *
     * @param protocol               open protocol session (required)
     * @param url                    JDBC URL used to open the connection (required)
     * @param databaseVersion        server product version, may be null
     * @param dataverseCanonicalName dataverse from the URL/properties; drives
     *                               the initial catalog/schema (see initCatalogSchema)
     * @param properties             parsed driver properties
     * @param connectWarning         warning raised while connecting, may be null
     * @throws SQLException if the dataverse name is not valid for the
     *                      configured catalog/dataverse mode
     */
    public ADBConnection(ADBProtocolBase protocol, String url, ADBProductVersion databaseVersion,
            String dataverseCanonicalName, Map<ADBDriverProperty, Object> properties, SQLWarning connectWarning)
            throws SQLException {
        this.url = Objects.requireNonNull(url);
        this.protocol = Objects.requireNonNull(protocol);
        this.databaseVersion = databaseVersion;
        this.statements = new ConcurrentLinkedQueue<>();
        this.warning = connectWarning;
        this.closed = new AtomicBoolean(false);
        this.sqlCompatMode = (Boolean) ADBDriverProperty.Common.SQL_COMPAT_MODE.fetchPropertyValue(properties);
        this.catalogDataverseMode = getCatalogDataverseMode(properties, protocol.getErrorReporter());
        this.catalogIncludesSchemaless =
                (Boolean) ADBDriverProperty.Common.CATALOG_INCLUDES_SCHEMALESS.fetchPropertyValue(properties);
        this.databaseEntitySupported = checkDatabaseEntitySupport();
        // Must run last: depends on catalogDataverseMode and
        // databaseEntitySupported initialized above.
        initCatalogSchema(protocol, dataverseCanonicalName);
    }
    // Derives the initial catalog/schema fields from the connection's
    // dataverse canonical name ("db/dataverse" or "dataverse") according to
    // the configured CatalogDataverseMode.
    protected void initCatalogSchema(ADBProtocolBase protocol, String dataverseCanonicalName) throws SQLException {
        switch (catalogDataverseMode) {
            case CATALOG:
                // Whole canonical name becomes the catalog; schema stays null.
                if (dataverseCanonicalName == null || dataverseCanonicalName.isEmpty()) {
                    // No dataverse given: fall back to server-side defaults.
                    catalog = isDatabaseEntitySupported()
                            ? protocol.getDefaultDatabase() + "/" + protocol.getDefaultDataverse()
                            : protocol.getDefaultDataverse();
                } else {
                    catalog = dataverseCanonicalName;
                }
                // schema = null
                break;
            case CATALOG_SCHEMA:
                // Canonical name is split into catalog ("/" schema).
                if (dataverseCanonicalName == null || dataverseCanonicalName.isEmpty()) {
                    if (isDatabaseEntitySupported()) {
                        catalog = protocol.getDefaultDatabase();
                        schema = protocol.getDefaultDataverse();
                    } else {
                        catalog = protocol.getDefaultDataverse();
                        // schema = null
                    }
                } else {
                    String[] parts = dataverseCanonicalName.split("/");
                    switch (parts.length) {
                        case 1:
                            catalog = parts[0];
                            break;
                        case 2:
                            catalog = parts[0];
                            schema = parts[1];
                            break;
                        default:
                            // More than two path segments is not a valid name.
                            throw protocol.getErrorReporter().errorInConnection(dataverseCanonicalName); //TODO:FIXME
                    }
                }
                break;
            default:
                // Unreachable unless a new mode is added without handling.
                throw new IllegalStateException();
        }
    }
    // close() tears down synchronously; abort() tears down on the supplied
    // executor after a security check, as required by the JDBC contract.
    @Override
    public void close() throws SQLException {
        closeImpl(null);
    }

    @Override
    public void abort(Executor executor) throws SQLException {
        if (executor == null) {
            throw getErrorReporter().errorParameterValueNotSupported("executor");
        }
        // NOTE(review): SecurityManager is deprecated for removal in recent
        // JDKs; this follows the historical java.sql.Connection.abort contract.
        SecurityManager sec = System.getSecurityManager();
        if (sec != null) {
            sec.checkPermission(new SQLPermission("callAbort"));
        }
        closeImpl(executor);
    }
protected void closeImpl(Executor executor) throws SQLException {
boolean wasClosed = closed.getAndSet(true);
if (wasClosed) {
return;
}
if (executor == null) {
closeStatementsAndProtocol();
} else {
executor.execute(() -> {
try {
closeStatementsAndProtocol();
} catch (SQLException e) {
if (getLogger().isLoggable(Level.FINE)) {
getLogger().log(Level.FINE, e.getMessage(), e);
}
}
});
}
}
protected void closeStatementsAndProtocol() throws SQLException {
SQLException err = null;
try {
closeRegisteredStatements();
} catch (SQLException e) {
err = e;
}
try {
protocol.close();
} catch (SQLException e) {
if (err != null) {
e.addSuppressed(err);
}
err = e;
}
if (err != null) {
throw err;
}
}
    @Override
    public boolean isClosed() {
        return closed.get();
    }

    // Guard used by almost every method: fail fast once the connection is
    // closed, with the JDBC "connection closed" SQL state.
    private void checkClosed() throws SQLException {
        if (isClosed()) {
            throw getErrorReporter().errorObjectClosed(Connection.class, ADBErrorReporter.SQLState.CONNECTION_CLOSED);
        }
    }
    // Connectivity

    // Validity check: a closed connection is reported invalid (without
    // validating the timeout first — NOTE(review): strict JDBC reads require
    // rejecting a negative timeout even then; confirm intended), otherwise the
    // server is pinged with the given timeout.
    @Override
    public boolean isValid(int timeoutSeconds) throws SQLException {
        if (isClosed()) {
            return false;
        }
        if (timeoutSeconds < 0) {
            throw getErrorReporter().errorParameterValueNotSupported("timeoutSeconds");
        }
        return protocol.ping(timeoutSeconds);
    }

    // Network timeout management is not supported by this driver.
    @Override
    public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(Connection.class, "setNetworkTimeout");
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        throw getErrorReporter().errorMethodNotSupported(Connection.class, "getNetworkTimeout");
    }
    // Metadata

    // Each call returns a fresh DatabaseMetaData wrapper, but all of them
    // share the single lazily-created meta statement below.
    @Override
    public DatabaseMetaData getMetaData() throws SQLException {
        checkClosed();
        ADBMetaStatement metaStatement = getOrCreateMetaStatement();
        return createDatabaseMetaData(metaStatement);
    }

    // Double-checked locking over the volatile 'metaStatement' field: the
    // statement is created and registered at most once per connection.
    private ADBMetaStatement getOrCreateMetaStatement() {
        ADBMetaStatement stmt = metaStatement;
        if (stmt == null) {
            synchronized (this) {
                stmt = metaStatement;
                if (stmt == null) {
                    stmt = createMetaStatement();
                    registerStatement(stmt);
                    metaStatement = stmt;
                }
            }
        }
        return stmt;
    }
    // Factory hooks; subclasses may override to provide specialized
    // metadata implementations.
    protected ADBMetaStatement createMetaStatement() {
        return new ADBMetaStatement(this);
    }

    protected ADBDatabaseMetaData createDatabaseMetaData(ADBMetaStatement metaStatement) {
        return new ADBDatabaseMetaData(metaStatement, databaseVersion);
    }
    // Statement construction

    @Override
    public Statement createStatement() throws SQLException {
        checkClosed();
        return createStatementImpl();
    }

    // Two-argument overload delegates with the connection's holdability.
    @Override
    public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException {
        return createStatement(resultSetType, resultSetConcurrency, getHoldability());
    }

    // Full overload: validates the requested result-set configuration
    // (forward-only / read-only are the only supported values) before
    // creating the statement.
    @Override
    public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability)
            throws SQLException {
        checkClosed();
        checkResultSetConfig(resultSetType, resultSetConcurrency, resultSetHoldability);
        return createStatementImpl();
    }
    // Rejects unsupported type/concurrency combinations; an unsupported
    // holdability is tolerated but logged at FINE level instead of failing.
    private void checkResultSetConfig(int resultSetType, int resultSetConcurrency, int resultSetHoldability)
            throws SQLException {
        boolean ok = resultSetType == ResultSet.TYPE_FORWARD_ONLY && resultSetConcurrency == ResultSet.CONCUR_READ_ONLY;
        if (!ok) {
            throw getErrorReporter().errorParameterValueNotSupported("resultSetType/resultSetConcurrency");
        }
        if (resultSetHoldability != ADBResultSet.RESULT_SET_HOLDABILITY) {
            if (getLogger().isLoggable(Level.FINE)) {
                getLogger().log(Level.FINE,
                        getErrorReporter().warningParameterValueNotSupported("ResultSetHoldability"));
            }
        }
    }
    // Creates a plain statement and registers it so it gets closed with the
    // connection.
    protected ADBStatement createStatementImpl() {
        ADBStatement stmt = new ADBStatement(this);
        registerStatement(stmt);
        return stmt;
    }
@Override
public PreparedStatement prepareStatement(String sql) throws SQLException {
checkClosed();
return prepareStatementImpl(sql);
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency)
throws SQLException {
return prepareStatement(sql, resultSetType, resultSetConcurrency, getHoldability());
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency,
int resultSetHoldability) throws SQLException {
checkClosed();
checkResultSetConfig(resultSetType, resultSetConcurrency, resultSetHoldability);
return prepareStatementImpl(sql);
}
// Auto-generated key retrieval is not supported by this driver, so all
// three generated-keys overloads raise "method not supported".
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "prepareStatement");
}

@Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "prepareStatement");
}

@Override
public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "prepareStatement");
}
// Creates a new prepared statement and registers it for cleanup on
// connection close.
private ADBPreparedStatement prepareStatementImpl(String sql) throws SQLException {
    ADBPreparedStatement stmt = new ADBPreparedStatement(this, sql);
    registerStatement(stmt);
    return stmt;
}
// Callable statements (stored procedures) are not supported; every
// prepareCall overload raises "method not supported".
@Override
public CallableStatement prepareCall(String sql) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "prepareCall");
}

@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "prepareCall");
}

@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency,
        int resultSetHoldability) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "prepareCall");
}
// No SQL dialect translation is performed; the input is returned as-is.
@Override
public String nativeSQL(String sql) throws SQLException {
    checkClosed();
    return sql;
}

// Catalog/schema are tracked locally on the connection object; setting them
// does not perform a server round trip.
@Override
public String getCatalog() throws SQLException {
    checkClosed();
    return catalog;
}

@Override
public void setCatalog(String catalog) throws SQLException {
    checkClosed();
    this.catalog = catalog;
}

@Override
public String getSchema() throws SQLException {
    checkClosed();
    return schema;
}

// In CATALOG mode the dataverse maps entirely to the catalog, so a
// non-empty schema has no meaning and is rejected.
@Override
public void setSchema(String schema) throws SQLException {
    checkClosed();
    if (catalogDataverseMode == ADBDriverProperty.CatalogDataverseMode.CATALOG
            && (schema != null && !schema.isEmpty())) {
        throw getErrorReporter().errorInConnection(schema); //TODO:FIXME:REVIEW make no-op?
    }
    this.schema = schema;
}
// Builds the canonical dataverse name from the current catalog/schema,
// according to the configured catalog-dataverse mapping mode:
// CATALOG -> catalog only; CATALOG_SCHEMA -> "catalog/schema" (or just the
// catalog when no schema is set). Any other mode is a programming error.
protected String getDataverseCanonicalName() {
    if (catalogDataverseMode == ADBDriverProperty.CatalogDataverseMode.CATALOG) {
        return catalog;
    }
    if (catalogDataverseMode == ADBDriverProperty.CatalogDataverseMode.CATALOG_SCHEMA) {
        return schema == null ? catalog : catalog + "/" + schema;
    }
    throw new IllegalStateException();
}
// Resolves the numeric CATALOG_DATAVERSE_MODE connection property to its
// enum constant; an unknown value is reported as a connection error.
protected static ADBDriverProperty.CatalogDataverseMode getCatalogDataverseMode(
        Map<ADBDriverProperty, Object> properties, ADBErrorReporter errorReporter) throws SQLException {
    int mode = ((Number) ADBDriverProperty.Common.CATALOG_DATAVERSE_MODE.fetchPropertyValue(properties)).intValue();
    try {
        return ADBDriverProperty.CatalogDataverseMode.valueOf(mode);
    } catch (IllegalArgumentException e) {
        throw errorReporter.errorInConnection(String.valueOf(mode)); //TODO:FIXME
    }
}
// Statement lifecycle

// Tracks a statement so it can be closed when the connection closes.
private void registerStatement(ADBStatement stmt) {
    statements.add(Objects.requireNonNull(stmt));
}

// Called by a statement when it is closed individually, so it is no longer
// tracked by this connection.
void deregisterStatement(ADBStatement stmt) {
    statements.remove(Objects.requireNonNull(stmt));
}
// Closes every statement still registered with this connection, attempting
// all of them even if some fail.
//
// Fix: the first failure is kept as the primary exception and later
// failures are attached to it via addSuppressed() — the try-with-resources
// convention. The previous code did the opposite (threw the LAST failure
// with earlier ones suppressed), hiding the original cause from callers.
private void closeRegisteredStatements() throws SQLException {
    SQLException err = null;
    ADBStatement statement;
    while ((statement = statements.poll()) != null) {
        try {
            statement.closeImpl(true, false);
        } catch (SQLException e) {
            if (err == null) {
                err = e;
            } else {
                err.addSuppressed(e);
            }
        }
    }
    if (err != null) {
        throw err;
    }
}
// Transaction control

// The driver always operates at READ_COMMITTED isolation.
@Override
public int getTransactionIsolation() throws SQLException {
    checkClosed();
    return Connection.TRANSACTION_READ_COMMITTED;
}

// READ_COMMITTED is accepted; the other standard levels are tolerated but
// only logged (the driver cannot actually honor them); anything else errors.
@Override
public void setTransactionIsolation(int level) throws SQLException {
    checkClosed();
    switch (level) {
        case Connection.TRANSACTION_READ_COMMITTED:
            break;
        case Connection.TRANSACTION_READ_UNCOMMITTED:
        case Connection.TRANSACTION_REPEATABLE_READ:
        case Connection.TRANSACTION_SERIALIZABLE:
            if (getLogger().isLoggable(Level.FINE)) {
                getLogger().log(Level.FINE,
                        getErrorReporter().warningParameterValueNotSupported("TransactionIsolationLevel"));
            }
            break;
        default:
            throw getErrorReporter().errorParameterValueNotSupported("TransactionIsolationLevel");
    }
}

// Cursors are always held over commit.
@Override
public int getHoldability() throws SQLException {
    checkClosed();
    return ResultSet.HOLD_CURSORS_OVER_COMMIT;
}

// HOLD_CURSORS_OVER_COMMIT is accepted; CLOSE_CURSORS_AT_COMMIT is
// tolerated but only logged; any other value is rejected.
@Override
public void setHoldability(int holdability) throws SQLException {
    checkClosed();
    switch (holdability) {
        case ResultSet.HOLD_CURSORS_OVER_COMMIT:
            break;
        case ResultSet.CLOSE_CURSORS_AT_COMMIT:
            if (getLogger().isLoggable(Level.FINE)) {
                getLogger().log(Level.FINE, getErrorReporter().warningParameterValueNotSupported("Holdability"));
            }
            break;
        default:
            throw getErrorReporter().errorParameterValueNotSupported("Holdability");
    }
}
// The connection is permanently in auto-commit mode; setAutoCommit is a
// no-op and explicit commit/rollback are incompatible with that mode.
@Override
public boolean getAutoCommit() throws SQLException {
    checkClosed();
    return true;
}

@Override
public void setAutoCommit(boolean autoCommit) throws SQLException {
    checkClosed();
}

@Override
public void commit() throws SQLException {
    checkClosed();
    throw getErrorReporter().errorIncompatibleMode("AutoCommit");
}

@Override
public void rollback() throws SQLException {
    checkClosed();
    throw getErrorReporter().errorIncompatibleMode("AutoCommit");
}

// Savepoints are not supported.
@Override
public Savepoint setSavepoint() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "setSavepoint");
}

@Override
public Savepoint setSavepoint(String name) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "setSavepoint");
}

@Override
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "releaseSavepoint");
}

@Override
public void rollback(Savepoint savepoint) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "rollback");
}
// Value construction

// LOB/array/struct/SQLXML construction is not supported by this driver.
@Override
public Clob createClob() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "createClob");
}

@Override
public Blob createBlob() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "createBlob");
}

@Override
public NClob createNClob() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "createNClob");
}

@Override
public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "createArrayOf");
}

@Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "createStruct");
}

@Override
public SQLXML createSQLXML() throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "createSQLXML");
}

// No custom type mapping: the map is always empty and cannot be replaced.
@Override
public Map<String, Class<?>> getTypeMap() throws SQLException {
    checkClosed();
    return Collections.emptyMap();
}

@Override
public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
    throw getErrorReporter().errorMethodNotSupported(Connection.class, "setTypeMap");
}
// Unsupported hints (ignored)

// The read-only hint is ignored: the connection always reports read-write.
@Override
public boolean isReadOnly() throws SQLException {
    checkClosed();
    return false;
}

@Override
public void setReadOnly(boolean readOnly) throws SQLException {
    checkClosed();
}

// Errors and warnings

@Override
public SQLWarning getWarnings() throws SQLException {
    checkClosed();
    return warning;
}

@Override
public void clearWarnings() throws SQLException {
    checkClosed();
    warning = null;
}
// Error reporting and logging are delegated to the protocol object.
@Override
protected ADBErrorReporter getErrorReporter() {
    return protocol.getErrorReporter();
}

protected Logger getLogger() {
    return protocol.getLogger();
}
// Client info is not supported: getters return empty values, while setters
// raise SQLClientInfoException (as required by the JDBC contract).
@Override
public String getClientInfo(String name) throws SQLException {
    checkClosed();
    return null;
}

@Override
public Properties getClientInfo() throws SQLException {
    checkClosed();
    return new Properties();
}

@Override
public void setClientInfo(Properties properties) throws SQLClientInfoException {
    throw getErrorReporter().errorClientInfoMethodNotSupported(Connection.class, "setClientInfo");
}

@Override
public void setClientInfo(String name, String value) throws SQLClientInfoException {
    throw getErrorReporter().errorClientInfoMethodNotSupported(Connection.class, "setClientInfo");
}
// Probes the Metadata dataverse for a 'Database' dataset, whose presence
// indicates that the connected server supports the database entity.
//
// Fix: the statement is now closed even when executeQuery() itself throws.
// Previously the query ran before the try block, so a query failure leaked
// the registered statement.
protected boolean checkDatabaseEntitySupport() throws SQLException {
    checkClosed();
    String sql = "select count(*) " + "from Metadata.`Dataset` "
            + "where DataverseName='Metadata' and DatasetName='Database'";
    ADBStatement stmt = createStatementImpl();
    try {
        ADBResultSet resultSet = stmt.executeQuery(sql);
        if (resultSet.next()) {
            return resultSet.getInt(1) > 0;
        }
        return false;
    } finally {
        stmt.close();
    }
}
// Returns the cached result of the database-entity probe performed at
// connection setup.
public boolean isDatabaseEntitySupported() {
    return databaseEntitySupported;
}
}
| 6,060 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBRowStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.charset.StandardCharsets;
import java.sql.Date;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.time.ZoneId;
import java.time.format.DateTimeParseException;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.BeanDescription;
import com.fasterxml.jackson.databind.DeserializationConfig;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.deser.BeanDeserializerModifier;
import com.fasterxml.jackson.databind.module.SimpleModule;
public final class ADBRowStore {

    // Separates the type tag from the value in the driver's tagged text encoding.
    static final char TEXT_DELIMITER = ':';

    private static final String ROW_STORE_ATTR_NAME = ADBRowStore.class.getSimpleName();

    // Cached IEEE-754 bit patterns so float/double registers can be tested
    // for zero/NaN without converting back to floating point.
    private static final int FLOAT_NAN_BITS = Float.floatToIntBits(Float.NaN);
    private static final int FLOAT_POSITIVE_ZERO_BITS = Float.floatToIntBits(+0.0f);
    private static final int FLOAT_NEGATIVE_ZERO_BITS = Float.floatToIntBits(-0.0f);
    private static final long DOUBLE_NAN_BITS = Double.doubleToLongBits(Double.NaN);
    private static final long DOUBLE_POSITIVE_ZERO_BITS = Double.doubleToLongBits(+0.0d);
    private static final long DOUBLE_NEGATIVE_ZERO_BITS = Double.doubleToLongBits(-0.0d);

    // Accessors used by getObject() for atomic vs. non-atomic target classes.
    static final Map<Class<?>, GetObjectFunction> OBJECT_ACCESSORS_ATOMIC = createAtomicObjectAccessorMap();
    static final List<Class<?>> GET_OBJECT_NON_ATOMIC = Arrays.asList(Collection.class, List.class, Map.class);

    private static final ZoneId TZ_UTC = ZoneId.of("UTC");

    private final ADBResultSet resultSet;
    // Per-column decoded state: type tag, boxed/complex values, and two raw
    // 64-bit registers per column for primitive/temporal values.
    private final ADBDatatype[] columnTypes;
    private final Object[] objectStore;
    private final long[] registerStore; // 2 registers per column
    private final TimeZone tzSystem = TimeZone.getDefault();
    // Number of characters consumed by the most recent parse step.
    private int parsedLength;
    private long currentDateChronon;
    private JsonGenerator jsonGen;
    private StringWriter jsonGenBuffer;
    // Allocates per-column storage for the given column count
    // (two 64-bit registers per column plus one object slot).
    public ADBRowStore(ADBResultSet resultSet, int initialColumnCount) {
        this.resultSet = Objects.requireNonNull(resultSet);
        columnTypes = new ADBDatatype[initialColumnCount];
        objectStore = new Object[initialColumnCount];
        registerStore = new long[initialColumnCount * 2];
    }

    // Clears all per-column state so the store can be reused for the next row.
    void reset() {
        Arrays.fill(columnTypes, ADBDatatype.MISSING);
        Arrays.fill(registerStore, 0);
        Arrays.fill(objectStore, null);
    }
    // Records the decoded type of the given column.
    private void setColumnType(int columnIndex, ADBDatatype columnType) {
        columnTypes[columnIndex] = columnType;
    }

    // Returns the decoded type of the given column (MISSING after reset()).
    ADBDatatype getColumnType(int columnIndex) {
        return columnTypes[columnIndex];
    }
    // Decodes one column given in the tagged text encoding (type tag followed
    // by the value's text form) and stores it into this row store: primitive
    // and temporal values go into the registers, strings/UUIDs into the
    // object store. Complex types (OBJECT/ARRAY/MULTISET) must not reach this
    // method; they are stored via putArrayColumn()/putRecordColumn().
    void putColumn(int columnIndex, char[] textChars, int textOffset, int textLength) throws SQLException {
        byte valueTypeTag = parseTypeTag(textChars, textOffset, textLength);
        ADBDatatype valueType = ADBDatatype.findByTypeTag(valueTypeTag);
        if (valueType == null) {
            throw getErrorReporter().errorUnexpectedType(valueTypeTag);
        }
        // parsedLength was set by parseTypeTag(); the remainder is the value text
        int nonTaggedOffset = textOffset + parsedLength;
        int nonTaggedLength = textLength - parsedLength;
        int nonTaggedEnd = nonTaggedOffset + nonTaggedLength; // = textOffset + textLength
        setColumnType(columnIndex, valueType);
        // NULL, BOOLEAN, BIGINT shouldn't normally happen. only handle here for completeness
        switch (valueType) {
            case MISSING:
            case NULL:
                // no content
                break;
            case BOOLEAN:
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
            case FLOAT:
            case DOUBLE:
            case DATE:
            case TIME:
            case DATETIME:
            case YEARMONTHDURATION:
            case DAYTIMEDURATION:
                // single integer payload (raw bits for FLOAT/DOUBLE)
                long r0 = parseInt64(textChars, nonTaggedOffset, nonTaggedEnd);
                setColumnRegisters(columnIndex, r0, 0);
                break;
            case STRING:
                objectStore[columnIndex] = new String(textChars, nonTaggedOffset, nonTaggedLength);
                break;
            case DURATION:
                // encoded as "<months>:<millis>" -- split on the delimiter
                int delimiterOffset = indexOf(TEXT_DELIMITER, textChars, nonTaggedOffset, nonTaggedEnd);
                if (delimiterOffset < 0 || delimiterOffset == nonTaggedEnd - 1) {
                    throw getErrorReporter().errorInProtocol();
                }
                r0 = parseInt64(textChars, nonTaggedOffset, delimiterOffset);
                long r1 = parseInt64(textChars, delimiterOffset + 1, nonTaggedEnd);
                setColumnRegisters(columnIndex, r0, r1);
                break;
            case UUID:
                // TODO: better encoding as 2 longs?
                objectStore[columnIndex] = UUID.fromString(new String(textChars, nonTaggedOffset, nonTaggedLength));
                break;
            case OBJECT:
            case ARRAY:
            case MULTISET:
                // Unexpected (shouldn't be called)
                throw new IllegalArgumentException(String.valueOf(valueType));
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // Direct column setters used when the value is already decoded
    // (bypassing the tagged-text path of putColumn()).

    void putNullColumn(int columnIndex) {
        setColumnType(columnIndex, ADBDatatype.NULL);
    }

    void putBooleanColumn(int columnIndex, boolean value) {
        setColumnType(columnIndex, ADBDatatype.BOOLEAN);
        setColumnRegisters(columnIndex, value ? 1 : 0, 0);
    }

    void putInt64Column(int columnIndex, long value) {
        setColumnType(columnIndex, ADBDatatype.BIGINT);
        setColumnRegisters(columnIndex, value, 0);
    }

    // Complex values are kept as-is in the object store.
    void putArrayColumn(int columnIndex, List<?> value) {
        setColumnType(columnIndex, ADBDatatype.ARRAY);
        objectStore[columnIndex] = Objects.requireNonNull(value);
    }

    void putRecordColumn(int columnIndex, Map<?, ?> value) {
        setColumnType(columnIndex, ADBDatatype.OBJECT);
        objectStore[columnIndex] = Objects.requireNonNull(value);
    }
    // Writes both 64-bit registers for a column (registers are laid out
    // pairwise: column i occupies slots 2*i and 2*i+1).
    private void setColumnRegisters(int columnIndex, long r0, long r1) {
        int registerPos = columnIndex * 2;
        registerStore[registerPos] = r0;
        registerStore[++registerPos] = r1;
    }
private long getColumnRegister(int columnIndex, int registerIndex) {
int registerPos = columnIndex * 2;
switch (registerIndex) {
case 0:
break;
case 1:
registerPos++;
break;
default:
throw new IllegalArgumentException();
}
return registerStore[registerPos];
}
    // Typed views over a column register. Integral views narrow by plain
    // cast; FLOAT/DOUBLE registers hold raw IEEE-754 bits, decoded via
    // intBitsToFloat()/longBitsToDouble().

    private boolean getColumnRegisterAsBoolean(int columnIndex, int registerIndex) {
        return getColumnRegister(columnIndex, registerIndex) != 0;
    }

    private byte getColumnRegisterAsByte(int columnIndex, int registerIndex) {
        return (byte) getColumnRegister(columnIndex, registerIndex);
    }

    private short getColumnRegisterAsShort(int columnIndex, int registerIndex) {
        return (short) getColumnRegister(columnIndex, registerIndex);
    }

    private int getColumnRegisterAsInt(int columnIndex, int registerIndex) {
        return (int) getColumnRegister(columnIndex, registerIndex);
    }

    private float getColumnRegisterAsFloat(int columnIndex, int registerIndex) {
        return Float.intBitsToFloat(getColumnRegisterAsFloatBits(columnIndex, registerIndex));
    }

    // Zero/NaN tests compare raw bit patterns against the cached constants,
    // avoiding floating-point comparison pitfalls (NaN != NaN).
    private boolean isColumnRegisterZeroOrNanFloat(int columnIndex, int registerIndex) {
        int bits = getColumnRegisterAsFloatBits(columnIndex, registerIndex);
        return bits == FLOAT_POSITIVE_ZERO_BITS || bits == FLOAT_NEGATIVE_ZERO_BITS || bits == FLOAT_NAN_BITS;
    }

    private int getColumnRegisterAsFloatBits(int columnIndex, int registerIndex) {
        return getColumnRegisterAsInt(columnIndex, registerIndex);
    }

    private double getColumnRegisterAsDouble(int columnIndex, int registerIndex) {
        return Double.longBitsToDouble(getColumnRegisterAsDoubleBits(columnIndex, registerIndex));
    }

    private boolean isColumnRegisterZeroOrNanDouble(int columnIndex, int registerIndex) {
        long bits = getColumnRegisterAsDoubleBits(columnIndex, registerIndex);
        return bits == DOUBLE_POSITIVE_ZERO_BITS || bits == DOUBLE_NEGATIVE_ZERO_BITS || bits == DOUBLE_NAN_BITS;
    }

    private long getColumnRegisterAsDoubleBits(int columnIndex, int registerIndex) {
        return getColumnRegister(columnIndex, registerIndex);
    }

    // YEARMONTHDURATION registers hold a month count, which fits in an int.
    private Period getColumnRegisterAsPeriod(int columnIndex, int registerIndex) {
        return Period.ofMonths((int) getColumnRegister(columnIndex, registerIndex));
    }
private Duration getColumnRegisterAsDuration(int columnIndex, int registerIndex) {
return Duration.ofMillis((int) getColumnRegister(columnIndex, registerIndex));
}
    // Returns the column's numeric value as a boxed Number, lazily converting
    // from the register on first access and caching the box in the object
    // store. Only valid for numeric column types.
    private Number getNumberFromObjectStore(int columnIndex) {
        Object o = objectStore[columnIndex];
        if (o != null) {
            return (Number) o;
        }
        Number n;
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case TINYINT:
                n = getColumnRegisterAsByte(columnIndex, 0);
                break;
            case SMALLINT:
                n = getColumnRegisterAsShort(columnIndex, 0);
                break;
            case INTEGER:
                n = getColumnRegisterAsInt(columnIndex, 0);
                break;
            case BIGINT:
                n = getColumnRegister(columnIndex, 0);
                break;
            case FLOAT:
                n = getColumnRegisterAsFloat(columnIndex, 0);
                break;
            case DOUBLE:
                n = getColumnRegisterAsDouble(columnIndex, 0);
                break;
            default:
                throw new IllegalArgumentException(String.valueOf(valueType));
        }
        objectStore[columnIndex] = n; // cache for subsequent calls
        return n;
    }
    // STRING and UUID columns are stored directly in the object store by
    // putColumn(); these accessors just cast.
    private String getStringFromObjectStore(int columnIndex) {
        return (String) objectStore[columnIndex];
    }

    private UUID getUUIDFromObjectStore(int columnIndex) {
        return (UUID) objectStore[columnIndex];
    }
    // Lazily materializes a Period for a YEARMONTHDURATION column and caches
    // it in the object store.
    private Period getPeriodFromObjectStore(int columnIndex) {
        Object o = objectStore[columnIndex];
        if (o != null) {
            return (Period) o;
        }
        ADBDatatype valueType = getColumnType(columnIndex);
        if (valueType != ADBDatatype.YEARMONTHDURATION) {
            throw new IllegalArgumentException(String.valueOf(valueType));
        }
        Period v = getColumnRegisterAsPeriod(columnIndex, 0);
        objectStore[columnIndex] = v;
        return v;
    }

    // Lazily materializes a Duration for a DAYTIMEDURATION column and caches
    // it in the object store.
    private Duration getDurationFromObjectStore(int columnIndex) {
        Object o = objectStore[columnIndex];
        if (o != null) {
            return (Duration) o;
        }
        ADBDatatype valueType = getColumnType(columnIndex);
        if (valueType != ADBDatatype.DAYTIMEDURATION) {
            throw new IllegalArgumentException(String.valueOf(valueType));
        }
        Duration v = getColumnRegisterAsDuration(columnIndex, 0);
        objectStore[columnIndex] = v;
        return v;
    }
    // Builds the ISO-8601 combined string for a DURATION column from its two
    // registers (months, millis) and caches it. Duration.toString() yields
    // "PT...", so substring(1) drops the leading 'P' before appending the
    // time part to the Period's "P..." prefix.
    private String getISODurationStringFromObjectStore(int columnIndex) {
        Object o = objectStore[columnIndex];
        if (o != null) {
            return (String) o;
        }
        ADBDatatype valueType = getColumnType(columnIndex);
        if (valueType != ADBDatatype.DURATION) {
            throw new IllegalArgumentException(String.valueOf(valueType));
        }
        String v = getColumnRegisterAsPeriod(columnIndex, 0).toString()
                + getColumnRegisterAsDuration(columnIndex, 1).toString().substring(1);
        objectStore[columnIndex] = v;
        return v;
    }
    // JDBC-style boolean accessor: MISSING/NULL -> false, numerics -> value
    // != 0 (zero/NaN floats are false), strings via Boolean.parseBoolean.
    boolean getBoolean(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return false;
            case BOOLEAN:
                return getColumnRegisterAsBoolean(columnIndex, 0);
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
                return getColumnRegister(columnIndex, 0) != 0;
            case FLOAT:
                return !isColumnRegisterZeroOrNanFloat(columnIndex, 0);
            case DOUBLE:
                return !isColumnRegisterZeroOrNanDouble(columnIndex, 0);
            case STRING:
                return Boolean.parseBoolean(getStringFromObjectStore(columnIndex));
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // Narrow integral accessors: MISSING/NULL -> 0, floats truncate toward
    // zero, strings are parsed via parseInt64 and narrowed by cast
    // (overflow wraps silently, matching the register narrowing behavior).
    byte getByte(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return 0;
            case BOOLEAN:
                return (byte) (getColumnRegisterAsBoolean(columnIndex, 0) ? 1 : 0);
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
                return getColumnRegisterAsByte(columnIndex, 0);
            case FLOAT:
                return (byte) getColumnRegisterAsFloat(columnIndex, 0);
            case DOUBLE:
                return (byte) getColumnRegisterAsDouble(columnIndex, 0);
            case STRING:
                return (byte) parseInt64(getStringFromObjectStore(columnIndex));
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }

    short getShort(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return 0;
            case BOOLEAN:
                return (short) (getColumnRegisterAsBoolean(columnIndex, 0) ? 1 : 0);
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
                return getColumnRegisterAsShort(columnIndex, 0);
            case FLOAT:
                return (short) getColumnRegisterAsFloat(columnIndex, 0);
            case DOUBLE:
                return (short) getColumnRegisterAsDouble(columnIndex, 0);
            case STRING:
                return (short) parseInt64(getStringFromObjectStore(columnIndex));
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // Int accessor; DATE/TIME/YEARMONTHDURATION registers are also exposed
    // as their raw int encodings (day/millisecond/month counts).
    int getInt(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return 0;
            case BOOLEAN:
                return getColumnRegisterAsBoolean(columnIndex, 0) ? 1 : 0;
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
            case DATE:
            case TIME:
            case YEARMONTHDURATION:
                return getColumnRegisterAsInt(columnIndex, 0);
            case FLOAT:
                return (int) getColumnRegisterAsFloat(columnIndex, 0);
            case DOUBLE:
                return (int) getColumnRegisterAsDouble(columnIndex, 0);
            case STRING:
                return (int) parseInt64(getStringFromObjectStore(columnIndex));
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // Long accessor; temporal/duration types return their raw register
    // encodings (chronon / millisecond / month counts).
    long getLong(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return 0;
            case BOOLEAN:
                return getColumnRegisterAsBoolean(columnIndex, 0) ? 1 : 0;
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
            case DATE:
            case TIME:
            case DATETIME:
            case YEARMONTHDURATION:
            case DAYTIMEDURATION:
                return getColumnRegister(columnIndex, 0);
            case FLOAT:
                return (long) getColumnRegisterAsFloat(columnIndex, 0);
            case DOUBLE:
                return (long) getColumnRegisterAsDouble(columnIndex, 0);
            case STRING:
                return parseInt64(getStringFromObjectStore(columnIndex));
            default:
                // TODO:support temporal types?
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // Floating-point accessors: MISSING/NULL -> 0, integrals widen, strings
    // parse via Float/Double.parseFloat with parse failures reported as
    // invalid-value errors.
    float getFloat(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return 0;
            case BOOLEAN:
                return getColumnRegisterAsBoolean(columnIndex, 0) ? 1f : 0f;
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
                return getColumnRegister(columnIndex, 0);
            case FLOAT:
                return getColumnRegisterAsFloat(columnIndex, 0);
            case DOUBLE:
                return (float) getColumnRegisterAsDouble(columnIndex, 0);
            case STRING:
                try {
                    return Float.parseFloat(getStringFromObjectStore(columnIndex));
                } catch (NumberFormatException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }

    double getDouble(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return 0;
            case BOOLEAN:
                return getColumnRegisterAsBoolean(columnIndex, 0) ? 1d : 0d;
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
                return getColumnRegister(columnIndex, 0);
            case FLOAT:
                return getColumnRegisterAsFloat(columnIndex, 0);
            case DOUBLE:
                return getColumnRegisterAsDouble(columnIndex, 0);
            case STRING:
                try {
                    return Double.parseDouble(getStringFromObjectStore(columnIndex));
                } catch (NumberFormatException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // BigDecimal accessor without scaling.
    BigDecimal getBigDecimal(int columnIndex) throws SQLException {
        return getBigDecimal(columnIndex, false, 0);
    }

    // BigDecimal accessor; when setScale is true, truncates (RoundingMode.DOWN)
    // to the requested scale. The float/double BigDecimal constructors give the
    // exact binary value (known quirk, hence the suppression); non-finite
    // values raise NumberFormatException, reported as invalid-value errors.
    @SuppressWarnings("UnpredictableBigDecimalConstructorCall")
    BigDecimal getBigDecimal(int columnIndex, boolean setScale, int scale) throws SQLException {
        BigDecimal dec;
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return null;
            case BOOLEAN:
                dec = getColumnRegisterAsBoolean(columnIndex, 0) ? BigDecimal.ONE : BigDecimal.ZERO;
                break;
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
            case DATE:
            case TIME:
            case DATETIME:
            case YEARMONTHDURATION:
            case DAYTIMEDURATION:
                // temporal types expose their raw register encodings
                dec = BigDecimal.valueOf(getColumnRegister(columnIndex, 0));
                break;
            case FLOAT:
                try {
                    dec = new BigDecimal(getColumnRegisterAsFloat(columnIndex, 0));
                } catch (NumberFormatException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
                break;
            case DOUBLE:
                try {
                    dec = new BigDecimal(getColumnRegisterAsDouble(columnIndex, 0));
                } catch (NumberFormatException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
                break;
            case STRING:
                try {
                    dec = new BigDecimal(getStringFromObjectStore(columnIndex));
                } catch (NumberFormatException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
                break;
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
        return setScale ? dec.setScale(scale, RoundingMode.DOWN) : dec;
    }
    // Date accessor using the default (system) time zone.
    private Date getDate(int columnIndex) throws SQLException {
        return getDate(columnIndex, null);
    }

    // JDBC Date accessor: DATE/DATETIME registers are converted from their
    // chronon encodings using the Calendar's time zone (or the system zone
    // when cal is null); STRING values are parsed as ISO local dates.
    Date getDate(int columnIndex, Calendar cal) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return null;
            case DATE:
                return toDateFromDateChronon(getColumnRegister(columnIndex, 0), getTimeZone(cal, tzSystem));
            case DATETIME:
                return toDateFromDatetimeChronon(getColumnRegister(columnIndex, 0), getTimeZone(cal, tzSystem));
            case STRING:
                try {
                    LocalDate d = LocalDate.parse(getStringFromObjectStore(columnIndex)); // TODO:review
                    // deprecated Date ctor takes year-1900 / 0-based month
                    return new Date(d.getYear() - 1900, d.getMonthValue() - 1, d.getDayOfMonth());
                } catch (DateTimeParseException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // java.time accessor: chronon registers convert without a time zone;
    // strings parse as ISO local dates.
    LocalDate getLocalDate(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return null;
            case DATE:
                return toLocalDateFromDateChronon(getColumnRegister(columnIndex, 0));
            case DATETIME:
                return toLocalDateFromDatetimeChronon(getColumnRegister(columnIndex, 0));
            case STRING:
                try {
                    return LocalDate.parse(getStringFromObjectStore(columnIndex)); // TODO:review
                } catch (DateTimeParseException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // Time accessor using the default (system) time zone.
    private Time getTime(int columnIndex) throws SQLException {
        return getTime(columnIndex, null);
    }

    // JDBC Time accessor: TIME/DATETIME registers convert from their chronon
    // encodings in the Calendar's zone (system zone when cal is null);
    // strings are parsed as ISO local times and re-encoded as millis-of-day.
    Time getTime(int columnIndex, Calendar cal) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return null;
            case TIME:
                return toTimeFromTimeChronon(getColumnRegister(columnIndex, 0), getTimeZone(cal, tzSystem));
            case DATETIME:
                return toTimeFromDatetimeChronon(getColumnRegister(columnIndex, 0), getTimeZone(cal, tzSystem));
            case STRING:
                try {
                    LocalTime t = LocalTime.parse(getStringFromObjectStore(columnIndex)); // TODO:review
                    return toTimeFromTimeChronon(TimeUnit.NANOSECONDS.toMillis(t.toNanoOfDay()),
                            getTimeZone(cal, tzSystem));
                } catch (DateTimeParseException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
    // java.time accessor: chronon registers convert without a time zone;
    // strings parse as ISO local times.
    LocalTime getLocalTime(int columnIndex) throws SQLException {
        ADBDatatype valueType = getColumnType(columnIndex);
        switch (valueType) {
            case MISSING:
            case NULL:
                return null;
            case TIME:
                return toLocalTimeFromTimeChronon(getColumnRegister(columnIndex, 0));
            case DATETIME:
                return toLocalTimeFromDatetimeChronon(getColumnRegister(columnIndex, 0));
            case STRING:
                try {
                    return LocalTime.parse(getStringFromObjectStore(columnIndex)); // TODO:review
                } catch (DateTimeParseException e) {
                    throw getErrorReporter().errorInvalidValueOfType(valueType);
                }
            default:
                throw getErrorReporter().errorUnexpectedType(valueType);
        }
    }
// Returns the column value as a java.sql.Timestamp in the JVM's default time zone.
private Timestamp getTimestamp(int columnIndex) throws SQLException {
return getTimestamp(columnIndex, null);
}
// Returns the column value as a java.sql.Timestamp, adjusted to the time zone of the
// given Calendar (or the system time zone when cal is null). MISSING/NULL map to null.
// STRING values are parsed as ISO instants; since an Instant is absolute (UTC-based),
// no calendar adjustment is applied on that path.
Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
case DATE:
return toTimestampFromDateChronon(getColumnRegister(columnIndex, 0), getTimeZone(cal, tzSystem));
case DATETIME:
return toTimestampFromDatetimeChronon(getColumnRegister(columnIndex, 0), getTimeZone(cal, tzSystem));
case STRING:
try {
Instant i = Instant.parse(getStringFromObjectStore(columnIndex));
// combine whole seconds and the millisecond part of the nano field;
// sub-millisecond precision is intentionally dropped
long millis0 = TimeUnit.SECONDS.toMillis(i.getEpochSecond());
long millis1 = TimeUnit.NANOSECONDS.toMillis(i.getNano());
return new Timestamp(millis0 + millis1);
} catch (DateTimeParseException e) {
throw getErrorReporter().errorInvalidValueOfType(valueType);
}
default:
throw getErrorReporter().errorUnexpectedType(valueType);
}
}
// Returns the column value as a java.time.LocalDateTime (no time-zone adjustment).
// DATE chronons resolve to midnight of that day; STRING values are parsed as ISO
// local date-times; MISSING/NULL map to Java null.
LocalDateTime getLocalDateTime(int columnIndex) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
case DATE:
return toLocalDateTimeFromDateChronon(getColumnRegister(columnIndex, 0));
case DATETIME:
return toLocalDateTimeFromDatetimeChronon(getColumnRegister(columnIndex, 0));
case STRING:
try {
return LocalDateTime.parse(getStringFromObjectStore(columnIndex)); // TODO:review
} catch (DateTimeParseException e) {
throw getErrorReporter().errorInvalidValueOfType(valueType);
}
default:
throw getErrorReporter().errorUnexpectedType(valueType);
}
}
// Returns the column value as a java.time.Period (date-based duration).
// YEARMONTHDURATION values come pre-parsed from the object store; DURATION values
// read the year-month component from register 0; STRING values are parsed as ISO
// periods; MISSING/NULL map to Java null.
Period getPeriod(int columnIndex) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
case YEARMONTHDURATION:
return getPeriodFromObjectStore(columnIndex);
case DURATION:
return getColumnRegisterAsPeriod(columnIndex, 0);
case STRING:
try {
return Period.parse(getStringFromObjectStore(columnIndex)); // TODO:review
} catch (DateTimeParseException e) {
throw getErrorReporter().errorInvalidValueOfType(valueType);
}
default:
throw getErrorReporter().errorUnexpectedType(valueType);
}
}
// Returns the column value as a java.time.Duration (time-based duration).
// DAYTIMEDURATION values come pre-parsed from the object store; DURATION values read
// the day-time component from register 1 (register 0 holds the year-month part, see
// getPeriod); STRING values are parsed as ISO durations; MISSING/NULL map to null.
Duration getDuration(int columnIndex) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
case DAYTIMEDURATION:
return getDurationFromObjectStore(columnIndex);
case DURATION:
return getColumnRegisterAsDuration(columnIndex, 1);
case STRING:
try {
return Duration.parse(getStringFromObjectStore(columnIndex)); // TODO:review
} catch (DateTimeParseException e) {
throw getErrorReporter().errorInvalidValueOfType(valueType);
}
default:
throw getErrorReporter().errorUnexpectedType(valueType);
}
}
/**
 * Returns the column value as a byte array: the UTF-8 encoding of the string value.
 * MISSING/NULL columns produce {@code null}; any non-string type is rejected.
 */
byte[] getBinary(int columnIndex) throws SQLException {
    ADBDatatype columnType = getColumnType(columnIndex);
    if (columnType == ADBDatatype.MISSING || columnType == ADBDatatype.NULL) {
        return null;
    }
    if (columnType == ADBDatatype.STRING) {
        String text = getStringFromObjectStore(columnIndex);
        return text.getBytes(StandardCharsets.UTF_8);
    }
    throw getErrorReporter().errorUnexpectedType(columnType);
}
// Returns the column value as a java.util.UUID; MISSING/NULL map to Java null and
// any non-UUID type is rejected.
UUID getUUID(int columnIndex) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
case UUID:
return getUUIDFromObjectStore(columnIndex);
default:
throw getErrorReporter().errorUnexpectedType(valueType);
}
}
// Returns the column value converted to its String form (ResultSet.getString()
// semantics). Numeric and temporal types use their canonical Java string
// representations; OBJECT/ARRAY values are serialized to JSON; MISSING/NULL map
// to Java null.
String getString(int columnIndex) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
case BOOLEAN:
return Boolean.toString(getColumnRegisterAsBoolean(columnIndex, 0));
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
// all integral widths are stored in a 64-bit register
return Long.toString(getColumnRegister(columnIndex, 0));
case FLOAT:
return Float.toString(getColumnRegisterAsFloat(columnIndex, 0));
case DOUBLE:
return Double.toString(getColumnRegisterAsDouble(columnIndex, 0));
case DATE:
return toLocalDateFromDateChronon(getColumnRegister(columnIndex, 0)).toString(); // TODO:review
case TIME:
return toLocalTimeFromTimeChronon(getColumnRegister(columnIndex, 0)).toString(); // TODO:review
case DATETIME:
return toLocalDateTimeFromDatetimeChronon(getColumnRegister(columnIndex, 0)).toString(); // TODO:review
case YEARMONTHDURATION:
return getPeriodFromObjectStore(columnIndex).toString(); // TODO:review
case DAYTIMEDURATION:
return getDurationFromObjectStore(columnIndex).toString(); // TODO:review
case DURATION:
return getISODurationStringFromObjectStore(columnIndex); // TODO:review
case STRING:
return getStringFromObjectStore(columnIndex);
case UUID:
return getUUIDFromObjectStore(columnIndex).toString();
case OBJECT:
case ARRAY:
return printAsJson(objectStore[columnIndex]);
default:
throw getErrorReporter().errorUnexpectedType(valueType);
}
}
/**
 * Returns the column value as a character stream over its string content.
 * MISSING/NULL columns produce {@code null}; non-string types are rejected.
 */
Reader getCharacterStream(int columnIndex) throws SQLException {
    ADBDatatype columnType = getColumnType(columnIndex);
    if (columnType == ADBDatatype.MISSING || columnType == ADBDatatype.NULL) {
        return null;
    }
    if (columnType == ADBDatatype.STRING) {
        return new StringReader(getStringFromObjectStore(columnIndex));
    }
    throw getErrorReporter().errorUnexpectedType(columnType);
}
// Returns the column value as a binary stream over the UTF-8 encoding of its string
// content; MISSING/NULL map to Java null and non-string types are rejected.
InputStream getInputStream(int columnIndex) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
case STRING:
return new ByteArrayInputStream(getStringFromObjectStore(columnIndex).getBytes(StandardCharsets.UTF_8));
default:
throw getErrorReporter().errorUnexpectedType(valueType);
}
}
// Returns the column value as the default Java object for its ADB type
// (ResultSet.getObject() semantics): temporal chronons become java.sql types in the
// system time zone, durations become Period/Duration/ISO strings, OBJECT/ARRAY are
// returned as the raw parsed structure, and MISSING/NULL map to Java null.
Object getObject(int columnIndex) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
case BOOLEAN:
return getColumnRegisterAsBoolean(columnIndex, 0);
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
case FLOAT:
case DOUBLE:
// numeric values keep their boxed type from the object store
return getNumberFromObjectStore(columnIndex);
case DATE:
return toDateFromDateChronon(getColumnRegister(columnIndex, 0), tzSystem);
case TIME:
return toTimeFromTimeChronon(getColumnRegister(columnIndex, 0), tzSystem);
case DATETIME:
return toTimestampFromDatetimeChronon(getColumnRegister(columnIndex, 0), tzSystem);
case YEARMONTHDURATION:
return getPeriodFromObjectStore(columnIndex);
case DAYTIMEDURATION:
return getDurationFromObjectStore(columnIndex);
case DURATION:
return getISODurationStringFromObjectStore(columnIndex);
case STRING:
return getStringFromObjectStore(columnIndex);
case UUID:
return getUUIDFromObjectStore(columnIndex);
case OBJECT:
case ARRAY:
return objectStore[columnIndex]; // TODO:how to make immutable?
default:
throw getErrorReporter().errorUnexpectedType(valueType);
}
}
// Returns the column value converted to the requested target class
// (ResultSet.getObject(int, Class) semantics). Atomic targets dispatch through the
// OBJECT_ACCESSORS_ATOMIC map of per-type accessors; other supported targets listed
// in GET_OBJECT_NON_ATOMIC fall back to the default getObject() conversion.
// MISSING/NULL map to Java null; unsupported targets are rejected.
<T> T getObject(int columnIndex, Class<T> targetType) throws SQLException {
ADBDatatype valueType = getColumnType(columnIndex);
switch (valueType) {
case MISSING:
case NULL:
return null;
default:
GetObjectFunction getter = OBJECT_ACCESSORS_ATOMIC.get(targetType);
Object v;
if (getter != null) {
v = getter.getObject(this, columnIndex);
} else if (GET_OBJECT_NON_ATOMIC.contains(targetType)) {
v = getObject(columnIndex);
} else {
throw getErrorReporter().errorUnexpectedType(targetType);
}
return targetType.cast(v);
}
}
/**
 * Accessor that extracts the value of a single column from a row store as a Java
 * object. Implemented via method references in {@code createAtomicObjectAccessorMap}.
 * Annotated {@code @FunctionalInterface} so the compiler enforces the single
 * abstract method contract those method references rely on.
 */
@FunctionalInterface
interface GetObjectFunction {
    Object getObject(ADBRowStore rowStore, int columnIndex) throws SQLException;
}
// Builds the dispatch table used by getObject(int, Class): maps each supported
// atomic target class (primitive wrappers, primitives, temporal and misc types)
// to the corresponding typed accessor method of this row store.
private static Map<Class<?>, GetObjectFunction> createAtomicObjectAccessorMap() {
Map<Class<?>, GetObjectFunction> map = new HashMap<>();
map.put(Boolean.TYPE, ADBRowStore::getBoolean);
map.put(Boolean.class, ADBRowStore::getBoolean);
map.put(Byte.TYPE, ADBRowStore::getByte);
map.put(Byte.class, ADBRowStore::getByte);
map.put(Short.TYPE, ADBRowStore::getShort);
map.put(Short.class, ADBRowStore::getShort);
map.put(Integer.TYPE, ADBRowStore::getInt);
map.put(Integer.class, ADBRowStore::getInt);
map.put(Long.TYPE, ADBRowStore::getLong);
map.put(Long.class, ADBRowStore::getLong);
map.put(Float.TYPE, ADBRowStore::getFloat);
map.put(Float.class, ADBRowStore::getFloat);
map.put(Double.TYPE, ADBRowStore::getDouble);
map.put(Double.class, ADBRowStore::getDouble);
map.put(BigDecimal.class, ADBRowStore::getBigDecimal);
map.put(Date.class, ADBRowStore::getDate);
map.put(LocalDate.class, ADBRowStore::getLocalDate);
map.put(Time.class, ADBRowStore::getTime);
map.put(LocalTime.class, ADBRowStore::getLocalTime);
map.put(Timestamp.class, ADBRowStore::getTimestamp);
map.put(LocalDateTime.class, ADBRowStore::getLocalDateTime);
map.put(Period.class, ADBRowStore::getPeriod);
map.put(Duration.class, ADBRowStore::getDuration);
map.put(UUID.class, ADBRowStore::getUUID);
map.put(String.class, ADBRowStore::getString);
return map;
}
// Converts a DATE chronon (days since epoch) to java.sql.Date, adjusted for tz.
private Date toDateFromDateChronon(long dateChrononInDays, TimeZone tz) {
return new Date(getDatetimeChrononAdjusted(TimeUnit.DAYS.toMillis(dateChrononInDays), tz));
}
// Converts a DATETIME chronon (epoch millis) to java.sql.Date, adjusted for tz.
private Date toDateFromDatetimeChronon(long datetimeChrononInMillis, TimeZone tz) {
return new Date(getDatetimeChrononAdjusted(datetimeChrononInMillis, tz));
}
// Converts a DATE chronon (days since epoch) to java.time.LocalDate.
private LocalDate toLocalDateFromDateChronon(long dateChrononInDays) {
return LocalDate.ofEpochDay(dateChrononInDays);
}
// Converts a DATETIME chronon (epoch millis) to the LocalDate of that instant in UTC.
private LocalDate toLocalDateFromDatetimeChronon(long datetimeChrononInMillis) {
// TODO: use LocalDate.ofInstant() in JDK 9+
return toLocalDateTimeFromDatetimeChronon(datetimeChrononInMillis).toLocalDate();
}
// Converts a TIME chronon (millis-of-day) to java.sql.Time by anchoring it to
// today's date (cached) and applying the time-zone adjustment.
private Time toTimeFromTimeChronon(long timeChrononInMillis, TimeZone tz) {
long datetimeChrononInMillis = getCurrentDateChrononInMillis() + timeChrononInMillis;
return new Time(getDatetimeChrononAdjusted(datetimeChrononInMillis, tz));
}
// Converts a DATETIME chronon (epoch millis) to java.sql.Time, adjusted for tz.
private Time toTimeFromDatetimeChronon(long datetimeChrononInMillis, TimeZone tz) {
return new Time(getDatetimeChrononAdjusted(datetimeChrononInMillis, tz));
}
// Converts a TIME chronon (millis-of-day) to java.time.LocalTime.
private LocalTime toLocalTimeFromTimeChronon(long timeChrononInMillis) {
return LocalTime.ofNanoOfDay(TimeUnit.MILLISECONDS.toNanos(timeChrononInMillis));
}
// Converts a DATETIME chronon (epoch millis) to the LocalTime of that instant in UTC.
private LocalTime toLocalTimeFromDatetimeChronon(long datetimeChrononInMillis) {
// TODO: use LocalTime.ofInstant() in JDK 9+
return toLocalDateTimeFromDatetimeChronon(datetimeChrononInMillis).toLocalTime();
}
// Converts a DATETIME chronon (epoch millis) to java.sql.Timestamp, adjusted for tz.
private Timestamp toTimestampFromDatetimeChronon(long datetimeChrononInMillis, TimeZone tz) {
return new Timestamp(getDatetimeChrononAdjusted(datetimeChrononInMillis, tz));
}
// Converts a DATE chronon (days since epoch) to a midnight Timestamp, adjusted for tz.
private Timestamp toTimestampFromDateChronon(long dateChrononInDays, TimeZone tz) {
return new Timestamp(getDatetimeChrononAdjusted(TimeUnit.DAYS.toMillis(dateChrononInDays), tz));
}
// Converts a DATETIME chronon (epoch millis) to a LocalDateTime interpreted in UTC.
private LocalDateTime toLocalDateTimeFromDatetimeChronon(long datetimeChrononInMillis) {
return LocalDateTime.ofInstant(Instant.ofEpochMilli(datetimeChrononInMillis), TZ_UTC);
}
// Converts a DATE chronon (days since epoch) to a LocalDateTime at start of day.
private LocalDateTime toLocalDateTimeFromDateChronon(long dateChrononInDays) {
return LocalDate.ofEpochDay(dateChrononInDays).atStartOfDay();
}
// Shifts an epoch-millis chronon by the zone offset in effect at that instant, so the
// resulting java.sql value renders the original wall-clock fields in the target zone.
private long getDatetimeChrononAdjusted(long datetimeChrononInMillis, TimeZone tz) {
int tzOffset = tz.getOffset(datetimeChrononInMillis);
return datetimeChrononInMillis - tzOffset;
}
// Lazily caches the epoch-millis of today's midnight (UTC) on first use; subsequent
// calls reuse the cached value for the lifetime of this row store.
// NOTE(review): not guarded against concurrent access — presumably this store is
// confined to a single thread per result set; confirm.
private long getCurrentDateChrononInMillis() {
if (currentDateChronon == 0) {
currentDateChronon = TimeUnit.DAYS.toMillis(TimeUnit.MILLISECONDS.toDays(System.currentTimeMillis()));
}
return currentDateChronon;
}
// Resolves the effective time zone: the calendar's zone when a calendar was
// supplied, otherwise the given default.
private TimeZone getTimeZone(Calendar cal, TimeZone tzDefault) {
    if (cal == null) {
        return tzDefault;
    }
    return cal.getTimeZone();
}
// Serializes an OBJECT/ARRAY column value to a JSON string. The Jackson generator
// and its backing StringWriter are created lazily on first use and reused for every
// subsequent call; the buffer is truncated (not reallocated) after each invocation.
private String printAsJson(Object value) throws SQLException {
if (jsonGenBuffer == null) {
jsonGenBuffer = new StringWriter();
try {
//TODO:FIXME:need to configure generator to print java.sql.Date/Times properly
jsonGen = resultSet.metadata.statement.connection.protocol.getDriverContext().getGenericObjectWriter()
.getFactory().createGenerator(jsonGenBuffer);
} catch (IOException e) {
throw getErrorReporter().errorInResultHandling(e);
}
}
try {
jsonGen.writeObject(value);
jsonGen.flush();
return jsonGenBuffer.getBuffer().toString();
} catch (IOException e) {
throw getErrorReporter().errorInResultHandling(e);
} finally {
// reset the shared buffer for the next call without discarding its capacity
jsonGenBuffer.getBuffer().setLength(0);
}
}
// Returns a Jackson ObjectReader bound to this row store via a reader attribute, so
// the deserializer modifier (see createADMFormatDeserializerModifier) can find it.
ObjectReader createComplexColumnObjectReader(ObjectReader templateReader) {
return templateReader.withAttribute(ROW_STORE_ATTR_NAME, this);
}
// Configures a Jackson mapper for ADM-format payloads: integers deserialize as Long,
// and string values are routed through the ADM string deserializer installed below.
static void configureADMFormatDeserialization(ObjectMapper objectMapper, SimpleModule serdeModule) {
objectMapper.configure(DeserializationFeature.USE_LONG_FOR_INTS, true);
serdeModule.setDeserializerModifier(createADMFormatDeserializerModifier());
}
// Creates a Jackson modifier that replaces the default String deserializer with the
// ADM-aware one of the row store carried in the deserialization attributes
// (attached by createComplexColumnObjectReader).
// NOTE(review): rowStore would be null if the attribute was never attached —
// presumably all ADM readers go through createComplexColumnObjectReader; confirm.
private static BeanDeserializerModifier createADMFormatDeserializerModifier() {
return new BeanDeserializerModifier() {
@Override
public JsonDeserializer<?> modifyDeserializer(DeserializationConfig config, BeanDescription beanDesc,
JsonDeserializer<?> deserializer) {
if (String.class.equals(beanDesc.getClassInfo().getAnnotated())) {
ADBRowStore rowStore = (ADBRowStore) config.getAttributes().getAttribute(ROW_STORE_ATTR_NAME);
return rowStore.createADMFormatStringDeserializer();
} else {
return deserializer;
}
}
};
}
// Creates a deserializer that decodes an ADM-encoded string token by round-tripping
// it through column 0 of this row store: the token text (which may carry a type tag,
// see parseTypeTag) is loaded with putColumn and converted with getObject. Note that
// this resets the row store's column state as a side effect.
private JsonDeserializer<?> createADMFormatStringDeserializer() {
return new JsonDeserializer<Object>() {
@Override
public Object deserialize(JsonParser parser, DeserializationContext ctx) throws IOException {
if (!parser.hasToken(JsonToken.VALUE_STRING)) {
throw new IOException("Unexpected token");
}
try {
ADBRowStore.this.reset();
ADBRowStore.this.putColumn(0, parser.getTextCharacters(), parser.getTextOffset(),
parser.getTextLength());
return ADBRowStore.this.getObject(0);
} catch (SQLException e) {
throw new IOException(e);
}
}
};
}
// Abstraction over "read a character at an index" so parseInt64 can operate on both
// CharSequence and char[] buffers without copying.
@FunctionalInterface
public interface ICharAccessor<T> {
char charAt(T input, int index);
}
// Parses the entire character sequence as a signed decimal 64-bit integer.
private long parseInt64(CharSequence buffer) throws SQLException {
return parseInt64(buffer, 0, buffer.length(), CharSequence::charAt);
}
// Parses buffer[begin, end) as a signed decimal 64-bit integer.
private long parseInt64(char[] buffer, int begin, int end) throws SQLException {
return parseInt64(buffer, begin, end, (input, index) -> input[index]);
}
/**
 * Parses a signed decimal 64-bit integer from {@code buffer[begin, end)}.
 * The magnitude is accumulated as a NEGATIVE number and negated at the end, so that
 * {@code Long.MIN_VALUE} (whose magnitude exceeds {@code Long.MAX_VALUE}) parses
 * correctly; {@code Math.addExact}/{@code multiplyExact} turn overflow into a
 * protocol error.
 *
 * Fixes: an empty range previously failed with an uncontrolled
 * IndexOutOfBoundsException, and a sign-only input ("+" or "-") silently parsed
 * as 0; both now raise a controlled protocol error.
 *
 * @throws IllegalArgumentException if {@code end < begin}
 * @throws SQLException on a non-digit character, overflow, or missing digits
 */
private <T> long parseInt64(T buffer, int begin, int end, ICharAccessor<T> charAccessor) throws SQLException {
    if (end < begin) {
        throw new IllegalArgumentException();
    }
    if (begin == end) {
        // empty input: no digits to parse
        throw getErrorReporter().errorInProtocol();
    }
    boolean positive = true;
    long value = 0;
    int offset = begin;
    char c = charAccessor.charAt(buffer, offset);
    if (c == '+') {
        offset++;
    } else if (c == '-') {
        offset++;
        positive = false;
    }
    if (offset == end) {
        // sign with no digits
        throw getErrorReporter().errorInProtocol();
    }
    try {
        for (; offset < end; offset++) {
            c = charAccessor.charAt(buffer, offset);
            if (c >= '0' && c <= '9') {
                // accumulate negatively: '0' - c is in [-9, 0]
                value = Math.addExact(Math.multiplyExact(value, 10L), '0' - c);
            } else {
                throw getErrorReporter().errorInProtocol(String.valueOf(c));
            }
        }
        if (positive) {
            value = Math.multiplyExact(value, -1L);
        }
        return value;
    } catch (ArithmeticException e) {
        throw getErrorReporter().errorInProtocol();
    }
}
// Decodes the type-tag prefix of an ADM-encoded text value. The wire format is:
// "" (empty string), TEXT_DELIMITER + payload (plain string), or two hex digits
// (the type tag) optionally followed by TEXT_DELIMITER + payload; MISSING/NULL tags
// carry no payload. Stores the number of consumed prefix characters in the
// parsedLength field and returns the decoded type tag.
private byte parseTypeTag(char[] textChars, int textOffset, int textLength) throws SQLException {
if (textLength == 0) {
// empty string
parsedLength = 0;
return ADBDatatype.STRING.getTypeTag();
}
if (textChars[textOffset] == TEXT_DELIMITER) {
// any string
parsedLength = 1;
return ADBDatatype.STRING.getTypeTag();
}
// any type
int typeTagLength = 2;
if (textLength < typeTagLength) {
throw getErrorReporter().errorInProtocol();
}
byte parsedTypeTag = getByteFromValidHexChars(textChars[textOffset], textChars[textOffset + 1]);
if (parsedTypeTag == ADBDatatype.MISSING.getTypeTag() || parsedTypeTag == ADBDatatype.NULL.getTypeTag()) {
// MISSING/NULL have no payload, hence no delimiter
parsedLength = typeTagLength;
return parsedTypeTag;
}
int delimiterLength = 1;
if (textLength < typeTagLength + delimiterLength) {
throw getErrorReporter().errorInProtocol();
}
if (textChars[textOffset + typeTagLength] != TEXT_DELIMITER) {
throw getErrorReporter().errorInProtocol();
}
parsedLength = typeTagLength + delimiterLength;
return parsedTypeTag;
}
// Decodes two hex characters (high nibble first) into a byte.
private byte getByteFromValidHexChars(char c0, char c1) throws SQLException {
return (byte) ((getValueFromValidHexChar(c0) << 4) + getValueFromValidHexChar(c1));
}
// Decodes a single hex character (0-9, a-f, A-F) into its numeric value;
// any other character is a protocol error.
private int getValueFromValidHexChar(char c) throws SQLException {
if (c >= '0' && c <= '9') {
return c - '0';
}
if (c >= 'a' && c <= 'f') {
return 10 + c - 'a';
}
if (c >= 'A' && c <= 'F') {
return 10 + c - 'A';
}
throw getErrorReporter().errorInProtocol(String.valueOf(c));
}
// Linear scan for the first occurrence of c in array[begin, end); returns -1 when absent.
private static int indexOf(char c, char[] array, int begin, int end) {
    int pos = begin;
    while (pos < end) {
        if (array[pos] == c) {
            return pos;
        }
        pos++;
    }
    return -1;
}
// Error reporting is delegated to the owning result set's reporter.
private ADBErrorReporter getErrorReporter() {
return resultSet.getErrorReporter();
}
}
| 6,061 |
0 | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc | Create_ds/asterixdb-clients/asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBDriverBase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.jdbc.core;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Base class for AsterixDB JDBC {@link java.sql.Driver} implementations. Handles URL
 * acceptance and parsing, connection-property resolution, lazy creation of the shared
 * driver context/version, and version compatibility checks; concrete subclasses supply
 * the protocol implementation, driver version, and URI parameter extraction.
 */
public abstract class ADBDriverBase {
static final int JDBC_MAJOR_VERSION = 4;
static final int JDBC_MINOR_VERSION = 2;
static final String JDBC_SCHEME = "jdbc:";
// system property suffix enabling stderr logging (package name + suffix)
static final String LOGGING_PROPERTY_SUFFIX = ".log.stderr";
protected final String urlScheme;
protected final int defaultApiPort;
protected final ADBErrorReporter errorReporter;
// lazily initialized singletons, guarded by double-checked locking on 'this'
private volatile ADBProductVersion driverVersion;
private volatile Map<String, ADBDriverProperty> supportedPropertiesIndex;
private volatile ADBDriverContext context;
public ADBDriverBase(String driverScheme, int defaultApiPort) {
this.urlScheme = JDBC_SCHEME + Objects.requireNonNull(driverScheme);
this.defaultApiPort = defaultApiPort;
this.errorReporter = createErrorReporter();
}
// Registers the driver with DriverManager; failures are logged, not thrown.
protected static void registerDriver(java.sql.Driver driver) {
try {
DriverManager.registerDriver(driver);
} catch (SQLException e) {
DriverManager.println(String.format("Error registering driver %s. %s", driver.getClass().getName(), e));
}
}
// Parses every property in inProps into outProperties; unknown names become warnings.
protected void parseConnectionProperties(Properties inProps, Map<ADBDriverProperty, Object> outProperties,
Map<String, ADBDriverProperty> supportedProperties, SQLWarning outWarning) throws SQLException {
if (inProps != null) {
for (Enumeration<?> en = inProps.propertyNames(); en.hasMoreElements();) {
String name = en.nextElement().toString();
String value = inProps.getProperty(name);
parseConnectionProperty(name, value, supportedProperties, outProperties, outWarning);
}
}
}
// Parses one property: unsupported names are appended to outWarning; empty values are
// ignored; value parse failures are reported as errors.
protected void parseConnectionProperty(String name, String textValue,
Map<String, ADBDriverProperty> supportedProperties, Map<ADBDriverProperty, Object> outProperties,
SQLWarning outWarning) throws SQLException {
ADBDriverProperty property = supportedProperties.get(name);
if (property == null) {
outWarning.setNextWarning(new SQLWarning(errorReporter.warningParameterNotSupported(name)));
return;
}
if (textValue == null || textValue.isEmpty()) {
return;
}
Object value;
try {
value = Objects.requireNonNull(property.getValueParser().apply(textValue));
} catch (RuntimeException e) {
throw errorReporter.errorParameterValueNotSupported(name);
}
outProperties.put(property, value);
}
private static Logger getParentLogger(Class<?> driverClass) {
return Logger.getLogger(driverClass.getPackage().getName());
}
// Installs a console log handler when the "<package>.log.stderr" system property is
// set; its value is either "true" (ALL) or a java.util.logging level name.
protected static void setupLogging(Class<? extends java.sql.Driver> driverClass) {
String logLevel = System.getProperty(driverClass.getPackage().getName() + LOGGING_PROPERTY_SUFFIX);
if (logLevel == null) {
return;
}
Level level;
try {
level = Boolean.TRUE.toString().equals(logLevel) ? Level.ALL : Level.parse(logLevel.toUpperCase());
} catch (IllegalArgumentException e) {
// ignore
return;
}
ConsoleHandler ch = new ConsoleHandler();
ch.setLevel(level);
Logger parentLogger = getParentLogger(driverClass);
parentLogger.setLevel(level);
parentLogger.addHandler(ch);
}
public boolean acceptsURL(String url) {
return url.startsWith(urlScheme);
}
// Establishes a connection for the given JDBC URL: parses host/port/dataverse and
// properties (URL parameters first, then the Properties object, which takes
// precedence), validates driver/database version constraints, and hands off to
// createConnection. Returns null for URLs this driver does not accept, per the
// java.sql.Driver contract. The protocol is closed if any later step fails.
public Connection connect(String url, Properties info) throws SQLException {
if (!acceptsURL(url)) {
return null;
}
URI subUri;
try {
subUri = new URI(url.substring(JDBC_SCHEME.length()));
} catch (URISyntaxException e) {
throw errorReporter.errorParameterValueNotSupported("URL");
}
String host = subUri.getHost();
if (host == null) {
throw errorReporter.errorParameterValueNotSupported("URL");
}
int port = subUri.getPort();
if (port <= 0) {
port = defaultApiPort;
}
Map<ADBDriverProperty, Object> properties = new HashMap<>();
Map<String, ADBDriverProperty> supportedProperties = getOrCreateSupportedPropertiesIndex();
SQLWarning warning = new SQLWarning();
parseConnectionProperties(getURIParameters(subUri), properties, supportedProperties, warning);
parseConnectionProperties(info, properties, supportedProperties, warning);
// the head SQLWarning is only a sentinel; unwrap it (null if no warnings were added)
warning = warning.getNextWarning() != null ? warning.getNextWarning() : null;
checkDriverVersion(properties);
String dataverseCanonicalName = getDataverseCanonicalNameFromURI(subUri);
ADBDriverContext driverContext = getOrCreateDriverContext();
ADBProtocolBase protocol = createProtocol(host, port, properties, driverContext);
try {
String serverVersion = protocol.connect();
ADBProductVersion databaseVersion = protocol.parseDatabaseVersion(serverVersion);
checkDatabaseVersion(properties, databaseVersion);
return createConnection(protocol, url, databaseVersion, dataverseCanonicalName, properties, warning);
} catch (SQLException e) {
try {
protocol.close();
} catch (SQLException e2) {
e.addSuppressed(e2);
}
throw e;
}
}
// Extracts the dataverse name from the URI path ("/name" -> "name"), or null.
protected String getDataverseCanonicalNameFromURI(URI subUri) {
String path = subUri.getPath();
return path != null && path.length() > 1 && path.startsWith("/") ? path.substring(1) : null;
}
// Describes the supported, non-hidden connection properties (Driver.getPropertyInfo).
public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) {
Collection<ADBDriverProperty> supportedProperties = getOrCreateSupportedPropertiesIndex().values();
List<DriverPropertyInfo> result = new ArrayList<>(supportedProperties.size());
for (ADBDriverProperty property : supportedProperties) {
if (property.isHidden()) {
continue;
}
Object defaultValue = property.getDefaultValue();
DriverPropertyInfo propInfo = new DriverPropertyInfo(property.getPropertyName(),
defaultValue != null ? defaultValue.toString() : null);
result.add(propInfo);
}
return result.toArray(new DriverPropertyInfo[0]);
}
public int getMajorVersion() {
return getOrCreateDriverVersion().getMajorVersion();
}
public int getMinorVersion() {
return getOrCreateDriverVersion().getMinorVersion();
}
public boolean jdbcCompliant() {
// not fully JDBC compliant (has not passed the JDBC compliance tests)
return false;
}
public Logger getParentLogger() {
return getParentLogger(getClass());
}
protected Logger getLogger() {
return Logger.getLogger(getClass().getName());
}
// Lazily creates the shared driver context (double-checked locking).
protected ADBDriverContext getOrCreateDriverContext() {
ADBDriverContext result = context;
if (result == null) {
synchronized (this) {
result = context;
if (result == null) {
context = result = createDriverContext();
}
}
}
return result;
}
protected ADBDriverContext createDriverContext() {
return new ADBDriverContext(getOrCreateDriverVersion(), errorReporter, getLogger());
}
// Lazily resolves the driver version (double-checked locking).
protected ADBProductVersion getOrCreateDriverVersion() {
ADBProductVersion result = driverVersion;
if (result == null) {
synchronized (this) {
result = driverVersion;
if (result == null) {
driverVersion = result = getDriverVersion();
}
}
}
return result;
}
protected abstract ADBProductVersion getDriverVersion();
protected Collection<ADBDriverProperty> getSupportedProperties() {
return Arrays.asList(ADBDriverProperty.Common.values());
}
// Lazily builds the name -> property lookup table (double-checked locking).
private Map<String, ADBDriverProperty> getOrCreateSupportedPropertiesIndex() {
Map<String, ADBDriverProperty> result = supportedPropertiesIndex;
if (result == null) {
synchronized (this) {
result = supportedPropertiesIndex;
if (result == null) {
supportedPropertiesIndex = result = createPropertyIndexByName(getSupportedProperties());
}
}
}
return result;
}
private static Map<String, ADBDriverProperty> createPropertyIndexByName(Collection<ADBDriverProperty> properties) {
Map<String, ADBDriverProperty> m = new LinkedHashMap<>();
for (ADBDriverProperty p : properties) {
m.put(p.getPropertyName(), p);
}
return Collections.unmodifiableMap(m);
}
// Fails if the configured minimum driver version exceeds this driver's version.
public void checkDriverVersion(Map<ADBDriverProperty, Object> properties) throws SQLException {
ADBProductVersion minExpectedVersion =
(ADBProductVersion) ADBDriverProperty.Common.MIN_DRIVER_VERSION.fetchPropertyValue(properties);
if (minExpectedVersion != null) {
ADBProductVersion driverVersion = getOrCreateDriverVersion();
if (!driverVersion.isAtLeast(minExpectedVersion)) {
throw errorReporter.errorUnexpectedDriverVersion(driverVersion, minExpectedVersion);
}
}
}
// Fails if the configured minimum database version exceeds the server's version.
public void checkDatabaseVersion(Map<ADBDriverProperty, Object> properties, ADBProductVersion databaseVersion)
throws SQLException {
ADBProductVersion minExpectedVersion =
(ADBProductVersion) ADBDriverProperty.Common.MIN_DATABASE_VERSION.fetchPropertyValue(properties);
if (minExpectedVersion != null) {
if (!databaseVersion.isAtLeast(minExpectedVersion)) {
throw errorReporter.errorUnexpectedDatabaseVersion(databaseVersion, minExpectedVersion);
}
}
}
protected ADBErrorReporter createErrorReporter() {
return new ADBErrorReporter();
}
protected abstract ADBProtocolBase createProtocol(String host, int port, Map<ADBDriverProperty, Object> properties,
ADBDriverContext driverContext) throws SQLException;
protected ADBConnection createConnection(ADBProtocolBase protocol, String url, ADBProductVersion databaseVersion,
String dataverseCanonicalName, Map<ADBDriverProperty, Object> properties, SQLWarning connectWarning)
throws SQLException {
return new ADBConnection(protocol, url, databaseVersion, dataverseCanonicalName, properties, connectWarning);
}
protected abstract Properties getURIParameters(URI uri) throws SQLException;
}
| 6,062 |
0 | Create_ds/jfnr/src/test/java/com/cisco | Create_ds/jfnr/src/test/java/com/cisco/fnr/FNRTest.java | package com.cisco.fnr;
/*
* jfnr - uses JNA for calling native implementation of libFNR
*
* jfnr extensions are contributed by Bhanu Prakash Gopularam (bhanprak@cisco.com)
*
* libFNR - A reference implementation library for FNR encryption mode.
*
* FNR represents "Flexible Naor and Reingold" mode
* FNR is a small domain block cipher to encrypt small domain
* objects ( < 128 bits ) like IPv4, MAC, Credit Card numbers etc.
* FNR is designed by Sashank Dara (sadara@cisco.com), Scott Fluhrer (sfluhrer@cisco.com)
*
* jfnr extensions are contributed by Bhanu Prakash Gopularam (bhanprak@cisco.com)
*
* Copyright (C) 2014 , Cisco Systems Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
**/
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import javax.crypto.spec.SecretKeySpec;
import java.security.InvalidParameterException;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;
import java.util.concurrent.Exchanger;
public class FNRTest extends TestCase {
FNR blockCipher ;
SecretKeySpec keySpec ;
String password;
String tweak;
/**
* Create the test case
*
* @param testName name of the test case
*/
public FNRTest (String testName)
{
super( testName );
}
/**
* @return the suite of tests being tested
*/
public static Test suite()
{
return new TestSuite( FNRTest.class );
}
public void setUp() throws Exception {
super.setUp();
blockCipher = null;
password = "password"; // Not for production
tweak = "tweak" ; // Not for production
try {
initKeySpec();
} catch (Exception e) {
e.printStackTrace();
}
}
private void initKeySpec() throws NoSuchAlgorithmException, InvalidKeySpecException {
byte[] saltyBytes = FNRUtils.getRandomBytes(20);
keySpec = FNRUtils.getSecretKeySpec(password, saltyBytes);
}
public void tearDown() throws Exception {
}
/**
* Rigourous Test :-)
*/
public void testString(){
try {
System.out.println("Test String");
String plainText = "Hello123";
byte[] plainBytes = plainText.getBytes();
blockCipher = new FNR(keySpec.getEncoded(), tweak, plainText.getBytes().length * Byte.SIZE);
byte[] cipherBytes = blockCipher.encrypt(plainBytes);
byte[] decryptBytes = blockCipher.decrypt(cipherBytes);
if (Arrays.equals(plainBytes, decryptBytes)) {
System.out.println("It works for Strings!");
assertTrue(true);
}
} catch (Exception e) {
System.out
.println("Something went wrong .. some where for String .."
+ e.getMessage());
assertTrue(false);
}
}
public void testIPv4(){
try {
System.out.println("Test IPv4 Address");
String plainIP = "10.20.30.40";
String decryptedIP, cipherIP;
final byte[] intArray = FNRUtils.rankIPAddress(plainIP);
blockCipher = new FNR(keySpec.getEncoded(), tweak, intArray.length * Byte.SIZE);
byte[] cipherBytes = blockCipher.encrypt(intArray);
cipherIP = FNRUtils.deRankIPAddress(cipherBytes);
System.out.println("Given IPv4 Address is " + plainIP);
System.out.println("Encrypted IPv4 Address is " + cipherIP);
byte[] decryptBytes = blockCipher.decrypt(cipherBytes);
decryptedIP = FNRUtils.deRankIPAddress(decryptBytes);
if (plainIP.equals(decryptedIP)) {
System.out.println("It works for IPv4 Address!");
assertTrue(true);
}
} catch (Exception e) {
System.out
.println("Something went wrong .. some where for String .."
+ e.getMessage());
assertTrue(false);
}
}
public void testTweakSize(){
System.out.println("Testing tweak size");
try {
tweak ="thisislongtweakeeeeeeee" ;
blockCipher = new FNR(keySpec.getEncoded(), tweak, 32);
}
catch (InvalidParameterException e){
assertFalse("Invalid tweak size", false);
}
try {
tweak ="smalltweak" ;
blockCipher = new FNR(keySpec.getEncoded(), tweak, 32);
}
catch (InvalidParameterException e){
assertFalse("Invalid tweak size", false);
}
try {
tweak ="tweak" ;
blockCipher = new FNR(keySpec.getEncoded(), tweak, 32);
}
catch (InvalidParameterException e){
assertTrue("Invalid tweak size", false);
}
}
public void testBlockSize(){
System.out.println("Testing Block size");
try {
blockCipher = new FNR(keySpec.getEncoded(), tweak, 0);
}
catch (InvalidParameterException e){
assertFalse("Invalid block size", false);
}
try {
blockCipher = new FNR(keySpec.getEncoded(), tweak, 130);
}
catch (InvalidParameterException e){
assertFalse("Invalid block size", false);
}
try {
blockCipher = new FNR(keySpec.getEncoded(), tweak, 32);
}
catch (InvalidParameterException e){
assertTrue("Invalid block size", false);
}
}
/**
 * Input-length validation for encrypt/decrypt: a 10-byte buffer on a 32-bit
 * (4-byte) block cipher must be rejected; a 4-byte buffer must be accepted.
 *
 * Fix: the original passed silently when the expected
 * InvalidParameterException was NOT thrown; fail() after each call that is
 * expected to throw closes that hole.
 */
public void testInputLength() {
    System.out.println("Testing Input Lengths in Encryption");
    byte[] bytes = new byte[10];
    Arrays.fill(bytes, (byte) 0);
    blockCipher = new FNR(keySpec.getEncoded(), tweak, 32); // 32 bits = 4 bytes
    try {
        blockCipher.encrypt(bytes);
        fail("Expected InvalidParameterException for 10-byte input on a 4-byte block");
    } catch (InvalidParameterException e) {
        // expected: wrong input length rejected
    }
    try {
        blockCipher.decrypt(bytes);
        fail("Expected InvalidParameterException for 10-byte input on a 4-byte block");
    } catch (InvalidParameterException e) {
        // expected: wrong input length rejected
    }
    bytes = new byte[4];
    Arrays.fill(bytes, (byte) 0);
    try {
        blockCipher.encrypt(bytes);
    } catch (InvalidParameterException e) {
        assertTrue("Valid input size was rejected: " + e.getMessage(), false);
    }
    try {
        blockCipher.decrypt(bytes);
    } catch (InvalidParameterException e) {
        assertTrue("Valid input size was rejected: " + e.getMessage(), false);
    }
}
/**
 * Key-size validation: FNR requires a 128-bit (16-byte) AES key. A 20-byte
 * key and a null key must be rejected; a 16-byte key must be accepted.
 *
 * Fix: the original relied on catch blocks that merely logged success and
 * passed silently when no exception was thrown; fail() after each
 * constructor expected to throw makes the test able to detect regressions.
 */
public void testKeySize() {
    System.out.println("Testing Key Sizes in Encryption");
    byte[] plainBytes = new byte[4];
    byte[] keyBytes = FNRUtils.getRandomBytes(20);
    Arrays.fill(plainBytes, (byte) 0);
    try {
        blockCipher = new FNR(keyBytes, tweak, 32); // 20 bytes != 128 bits
        fail("Expected InvalidParameterException for a 20-byte key");
    } catch (InvalidParameterException e) {
        // expected: wrong key size rejected
    }
    password = "password123344555555555"; // Not for production
    tweak = "tweak"; // Not for production
    try {
        initKeySpec();
    } catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
        e.printStackTrace();
    }
    try {
        blockCipher = new FNR(keyBytes, tweak, 32); // still the 20-byte key
        fail("Expected InvalidParameterException for a 20-byte key");
    } catch (InvalidParameterException e) {
        // expected: wrong key size rejected
    }
    try {
        blockCipher = new FNR(null, tweak, 32);
        fail("Expected InvalidParameterException for a null key");
    } catch (InvalidParameterException e) {
        // expected: null key rejected
    }
    try {
        keyBytes = FNRUtils.getRandomBytes(16); // valid 128-bit key
        blockCipher = new FNR(keyBytes, tweak, 32);
    } catch (InvalidParameterException e) {
        assertTrue("Valid 16-byte key was rejected", false);
    }
}
} | 6,063 |
0 | Create_ds/jfnr/src/main/java/com/cisco | Create_ds/jfnr/src/main/java/com/cisco/fnr/FNRLibrary.java | package com.cisco.fnr;
/*
* jfnr - uses JNA for calling native implementation of libFNR
*
* jfnr extensions are contributed by Bhanu Prakash Gopularam (bhanprak@cisco.com)
*
* libFNR - A reference implementation library for FNR encryption mode.
*
* FNR represents "Flexible Naor and Reingold" mode
* FNR is a small domain block cipher to encrypt small domain
* objects ( < 128 bits ) like IPv4, MAC, Credit Card numbers etc.
* FNR is designed by Sashank Dara (sadara@cisco.com), Scott Fluhrer (sfluhrer@cisco.com)
*
* Copyright (C) 2014 , Cisco Systems Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
**/
import com.sun.jna.IntegerType;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.Structure;
import java.util.Arrays;
import java.util.List;
/**
 * JNA binding for the native libFNR library ("Flexible Naor and Reingold"
 * mode), a small-domain block cipher for objects of fewer than 128 bits
 * (IPv4 addresses, MAC addresses, credit-card numbers, ...).
 */
public interface FNRLibrary extends Library {

    /**
     * Maps the native {@code fnr_expanded_key} struct produced by
     * {@link #FNR_expand_key}.
     */
    public static class fnr_expanded_key extends Structure {
        public int full_bytes;
        public char final_mask;
        public int full_elements;
        public byte final_element_mask;
        public int num_bits;
        public size_t size;
        // Non-public members: not listed in getFieldOrder() below.
        AES_KEY expanded_aes_key;
        byte[] aes_key;
        byte green[];
        byte red[] = new byte[1];

        @Override
        protected List getFieldOrder() {
            // NOTE(review): JNA uses this list to lay out the native struct.
            // The order here (alphabetical) differs from the declaration
            // order above and omits the non-public members — confirm it
            // matches the real fnr_expanded_key layout in libFNR.
            return Arrays.asList(new String[]{"final_element_mask",
                    "final_mask", "full_bytes", "full_elements",
                    "num_bits", "size",});
        }

        /** Marker subtype for passing this struct by reference. */
        public static class ByReference extends fnr_expanded_key implements
                Structure.ByReference {
        }

        /** Marker subtype for passing this struct by value. */
        public static class ByValue extends fnr_expanded_key implements
                Structure.ByValue {
        }
    }

    /**
     * Maps the native {@code fnr_expanded_tweak} struct: 15 expanded tweak
     * bytes produced by {@link #FNR_expand_tweak}.
     */
    public static class fnr_expanded_tweak extends Structure {
        /** Marker subtype for passing this struct by reference. */
        public static class ByReference extends fnr_expanded_tweak
                implements Structure.ByReference {
        }

        /** Marker subtype for passing this struct by value. */
        public static class ByValue extends fnr_expanded_tweak implements
                Structure.ByValue {
        }

        public byte[] tweak = new byte[15];

        @Override
        protected List getFieldOrder() {
            return Arrays.asList(new String[] { "tweak" });
        }
    }

    /**
     * Maps an {@code AES_KEY} struct: expanded round keys plus round count.
     */
    public static class AES_KEY extends Structure {
        public long rd_key[] = new long[4 * (14 + 1)];
        // Package-private; listed in getFieldOrder() below despite not being public.
        int rounds;

        @Override
        protected List getFieldOrder() {
            // NOTE(review): "rounds" is declared non-public yet named here,
            // and rd_key is declared long[] — verify both against the native
            // AES_KEY definition used by libFNR.
            return Arrays.asList(new String[] { "rd_key", "rounds" });
        }

        /** Marker subtype for passing this struct by reference. */
        public static class ByReference extends AES_KEY implements
                Structure.ByReference {
        }

        /** Marker subtype for passing this struct by value. */
        public static class ByValue extends AES_KEY implements
                Structure.ByValue {
        }
    }

    /** Native {@code size_t}, sized via JNA's Native.SIZE_T_SIZE. */
    public static class size_t extends IntegerType {
        public size_t() {
            this(0);
        }

        public size_t(long value) {
            super(Native.SIZE_T_SIZE, value);
        }
    }

    /** One-time native library initialization; call before other FNR_* functions. */
    public void FNR_init();

    /**
     * Expands a raw AES key for a cipher over {@code num_bits}-bit blocks.
     *
     * @param aes_key      raw AES key bytes
     * @param aes_key_size key size (per the native API; callers here pass 128)
     * @param num_bits     block size in bits
     */
    public fnr_expanded_key.ByReference FNR_expand_key(byte[] aes_key,
            int aes_key_size, size_t num_bits);

    /** Expands a raw tweak into {@code expanded_tweak} using the expanded key. */
    public void FNR_expand_tweak(
            fnr_expanded_tweak.ByReference expanded_tweak,
            fnr_expanded_key.ByReference key, byte[] tweak, size_t len_tweak);

    /** Encrypts {@code plaintext} into {@code ciphertext} (same length). */
    void FNR_encrypt(fnr_expanded_key.ByReference key,
            fnr_expanded_tweak.ByReference tweak, byte[] plaintext,
            byte[] ciphertext);

    /** Decrypts {@code ciphertext} into {@code plaintext} (same length). */
    void FNR_decrypt(fnr_expanded_key.ByReference key,
            fnr_expanded_tweak.ByReference tweak, byte[] ciphertext,
            byte[] plaintext);
}
| 6,064 |
0 | Create_ds/jfnr/src/main/java/com/cisco | Create_ds/jfnr/src/main/java/com/cisco/fnr/FNRUtils.java | package com.cisco.fnr;
/*
* jfnr - uses JNA for calling native implementation of libFNR
*
* jfnr extensions are contributed by Bhanu Prakash Gopularam (bhanprak@cisco.com)
*
* libFNR - A reference implementation library for FNR encryption mode.
*
* FNR represents "Flexible Naor and Reingold" mode
* FNR is a small domain block cipher to encrypt small domain
* objects ( < 128 bits ) like IPv4, MAC, Credit Card numbers etc.
* FNR is designed by Sashank Dara (sadara@cisco.com), Scott Fluhrer (sfluhrer@cisco.com)
*
*
* Copyright (C) 2014 , Cisco Systems Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
**/
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import java.nio.ByteBuffer;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
/**
 * Static helpers for the FNR cipher: ranking/de-ranking IPv4 addresses
 * to/from their 4-byte big-endian form, deriving a 128-bit AES key from a
 * password via PBKDF2, and generating random salts.
 */
public class FNRUtils {

    /**
     * Converts a dotted-quad IPv4 address ("a.b.c.d") into its 4-byte
     * big-endian representation.
     *
     * @param ipAddress dotted-quad IPv4 string
     * @return 4-byte big-endian encoding of the address
     * @throws IllegalArgumentException if the string is not four dot-separated parts
     * @throws NumberFormatException if a component is not a decimal integer
     */
    public static byte[] rankIPAddress(String ipAddress) {
        String[] comps = ipAddress.split("\\.");
        if (comps.length != 4) {
            // The original indexed comps[0..3] blindly and failed with an
            // ArrayIndexOutOfBoundsException on malformed input.
            throw new IllegalArgumentException("Not a dotted-quad IPv4 address: " + ipAddress);
        }
        int a = Integer.parseInt(comps[0]); // parseInt avoids Integer.valueOf boxing
        int b = Integer.parseInt(comps[1]);
        int c = Integer.parseInt(comps[2]);
        int d = Integer.parseInt(comps[3]);
        int ip = (a << 24) + (b << 16) + (c << 8) + d;
        return ByteBuffer.allocate(4).putInt(ip).array();
    }

    /**
     * Inverse of {@link #rankIPAddress(String)}: turns a 4-byte big-endian
     * value back into a dotted-quad string.
     */
    public static String deRankIPAddress(byte[] ipBytes) {
        final int ip = ByteBuffer.wrap(ipBytes).getInt();
        return toIPv4String(ip);
    }

    /**
     * Formats a 32-bit integer as a dotted-quad IPv4 string, most
     * significant byte first.
     */
    public static String toIPv4String(int address) {
        StringBuilder sb = new StringBuilder(16);
        for (int ii = 3; ii >= 0; ii--) {
            sb.append((int) (0xFF & (address >> (8 * ii))));
            if (ii > 0) {
                sb.append(".");
            }
        }
        return sb.toString();
    }

    /**
     * Derives a 128-bit AES key from a password and salt using
     * PBKDF2WithHmacSHA1 with 65536 iterations.
     *
     * @param password   the pass phrase to stretch
     * @param saltyBytes the salt bytes
     * @return an AES {@link SecretKeySpec} over the derived key material
     */
    public static SecretKeySpec getSecretKeySpec(String password, byte[] saltyBytes)
            throws NoSuchAlgorithmException, InvalidKeySpecException {
        int pswdIterations = 65536;
        int keySize = 128;
        // Derive the key
        SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
        PBEKeySpec spec = new PBEKeySpec(password.toCharArray(), saltyBytes,
                pswdIterations, keySize);
        SecretKey secretKey = factory.generateSecret(spec);
        return new SecretKeySpec(secretKey.getEncoded(), "AES");
    }

    /**
     * Returns {@code count} cryptographically strong random bytes
     * (suitable for use as a salt).
     */
    public static byte[] getRandomBytes(int count) {
        SecureRandom random = new SecureRandom();
        byte[] saltyBytes = new byte[count];
        random.nextBytes(saltyBytes);
        return saltyBytes;
    }
}
| 6,065 |
0 | Create_ds/jfnr/src/main/java/com/cisco | Create_ds/jfnr/src/main/java/com/cisco/fnr/FNR.java | package com.cisco.fnr;
/*
* jfnr - uses JNA for calling native implementation of libFNR
*
* jfnr extensions are contributed by Bhanu Prakash Gopularam (bhanprak@cisco.com)
*
* libFNR - A reference implementation library for FNR encryption mode.
*
* FNR represents "Flexible Naor and Reingold" mode
* FNR is a small domain block cipher to encrypt small domain
* objects ( < 128 bits ) like IPv4, MAC, Credit Card numbers etc.
* FNR is designed by Sashank Dara (sadara@cisco.com), Scott Fluhrer (sfluhrer@cisco.com)
*
* jfnr extensions are contributed by Bhanu Prakash Gopularam (bhanprak@cisco.com)
*
* Copyright (C) 2014 , Cisco Systems Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
**/
import com.sun.jna.Native;
import java.security.InvalidParameterException;
/**
 * Java wrapper (via JNA) around the native libFNR small-domain block cipher.
 * FNR encrypts objects of fewer than 128 bits (IPv4 addresses, MACs, card
 * numbers, ...) with a 128-bit AES key and a short tweak.
 */
public class FNR {

    /** Cipher block size in bits; fixed at construction (16 <= blockSize < 128). */
    private final int blockSize;
    private FNRLibrary.fnr_expanded_key.ByReference expanded_key = null;
    private FNRLibrary.fnr_expanded_tweak.ByReference expanded_tweak = null;
    private FNRLibrary fnrInstance;

    /**
     * Loads the native "fnr" library and expands the key and tweak.
     *
     * @param key       128-bit (16-byte) AES key
     * @param tweak     tweak string, at most 8 characters
     * @param blockSize block size in bits, in [16, 128)
     * @throws InvalidParameterException on invalid key/tweak/block size or if
     *                                   the native library cannot be loaded
     */
    public FNR(byte[] key, String tweak, int blockSize) throws InvalidParameterException {
        final int MAX_BLOCK_SIZE = 128;
        final int KEY_SIZE = 128; // AES key size in bits
        final int MAX_TWEAK_LENGTH = 8;
        final int MIN_BLOCK_SIZE = 16;

        if (blockSize < MIN_BLOCK_SIZE || blockSize >= MAX_BLOCK_SIZE)
            throw new InvalidParameterException("Invalid Block Size");
        if (tweak.length() > MAX_TWEAK_LENGTH)
            throw new InvalidParameterException("Invalid Tweak Size");
        if (key == null || key.length * 8 != KEY_SIZE)
            throw new InvalidParameterException("Invalid Key Size");
        try {
            // Load the native library
            fnrInstance = (FNRLibrary) Native.loadLibrary("fnr", FNRLibrary.class);
            // 1. one-time native initialization
            fnrInstance.FNR_init();
            // 2. expand the key. Fix: pass KEY_SIZE as the aes_key_size
            //    argument; the original passed MAX_BLOCK_SIZE, which only
            //    worked because both constants happen to be 128.
            expanded_key = fnrInstance.FNR_expand_key(key, KEY_SIZE,
                    new FNRLibrary.size_t(blockSize));
            // 3. expand the tweak
            expanded_tweak = new FNRLibrary.fnr_expanded_tweak.ByReference();
            fnrInstance.FNR_expand_tweak(expanded_tweak, expanded_key,
                    tweak.getBytes(), new FNRLibrary.size_t(tweak.length()));
        } catch (UnsatisfiedLinkError error) {
            throw new InvalidParameterException("Invalid library file" + error.getMessage());
        }
        this.blockSize = blockSize;
    }

    /**
     * Encrypts one block.
     *
     * @param plainBytes exactly blockSize/8 bytes of plaintext
     * @return ciphertext of the same length
     * @throws InvalidParameterException if the input length does not match the block size
     */
    public byte[] encrypt(byte[] plainBytes) throws InvalidParameterException {
        if (plainBytes == null
                || (plainBytes.length * Byte.SIZE) != blockSize)
            throw new InvalidParameterException("Invalid Input Length");
        byte[] cipherBytes = new byte[plainBytes.length];
        fnrInstance.FNR_encrypt(expanded_key, expanded_tweak, plainBytes,
                cipherBytes);
        return cipherBytes;
    }

    /**
     * Decrypts one block.
     *
     * @param cipherBytes exactly blockSize/8 bytes of ciphertext
     * @return plaintext of the same length
     * @throws InvalidParameterException if the input length does not match the block size
     */
    public byte[] decrypt(byte[] cipherBytes) throws InvalidParameterException {
        if (cipherBytes == null
                || (cipherBytes.length * Byte.SIZE) != blockSize)
            throw new InvalidParameterException("Invalid Input Length");
        byte[] decryptedBytes = new byte[cipherBytes.length];
        fnrInstance.FNR_decrypt(expanded_key, expanded_tweak, cipherBytes,
                decryptedBytes);
        return decryptedBytes;
    }
}
| 6,066 |
0 | Create_ds/fineract-cn-reporting/component-test/src/main/java/org/apache/fineract/cn | Create_ds/fineract-cn-reporting/component-test/src/main/java/org/apache/fineract/cn/reporting/AbstractReportingSpecificationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting;
import org.apache.fineract.cn.reporting.api.v1.client.ReportManager;
import org.apache.fineract.cn.reporting.service.ReportingConfiguration;
import java.security.interfaces.RSAPrivateKey;
import org.apache.fineract.cn.anubis.test.v1.TenantApplicationSecurityEnvironmentTestRule;
import org.apache.fineract.cn.api.context.AutoUserContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.netflix.feign.EnableFeignClients;
import org.springframework.cloud.netflix.ribbon.RibbonClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.junit4.SpringRunner;
/**
 * Base class for reporting component tests: boots the service in a defined
 * port, wires a feign {@link ReportManager} client, and opens/closes a tenant
 * user security context around each test.
 *
 * Fix: removed the unused private APP_NAME constant that duplicated (and
 * shadowed) the APP_NAME already defined on {@link SuiteTestEnvironment}.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(
    webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT,
    classes = {AbstractReportingSpecificationTest.TestConfiguration.class}
)
public class AbstractReportingSpecificationTest extends SuiteTestEnvironment {
  /** Name of the logger bean injected into tests. */
  public static final String LOGGER_NAME = "test-logger";

  /** Spring context for the tests: feign clients plus the reporting service configuration. */
  @Configuration
  @EnableFeignClients(basePackages = {"org.apache.fineract.cn.reporting.api.v1.client"})
  @Import({ReportingConfiguration.class})
  public static class TestConfiguration {
    public TestConfiguration() {
      super();
    }

    @Bean(name = LOGGER_NAME)
    public Logger logger() {
      return LoggerFactory.getLogger(LOGGER_NAME);
    }
  }

  /** Per-test tenant security environment; initialization gate is waitForInitialize(). */
  @Rule
  public final TenantApplicationSecurityEnvironmentTestRule tenantApplicationSecurityEnvironment
      = new TenantApplicationSecurityEnvironmentTestRule(testEnvironment, this::waitForInitialize);

  private AutoUserContext userContext;

  @Autowired
  ReportManager testSubject;

  @Autowired
  @Qualifier(LOGGER_NAME)
  Logger logger;

  /** Opens the test user's security context before each test. */
  @Before
  public void prepTest() {
    userContext = tenantApplicationSecurityEnvironment.createAutoUserContext(TEST_USER);
    final RSAPrivateKey tenantPrivateKey = tenantApplicationSecurityEnvironment.getSystemSecurityEnvironment().tenantPrivateKey();
    logger.info("tenantPrivateKey = {}", tenantPrivateKey);
  }

  /** Closes the security context after each test. */
  @After
  public void cleanTest() {
    userContext.close();
  }

  // No asynchronous initialization to wait for in these tests.
  boolean waitForInitialize() {
    return true;
  }
}
| 6,067 |
0 | Create_ds/fineract-cn-reporting/component-test/src/main/java/org/apache/fineract/cn | Create_ds/fineract-cn-reporting/component-test/src/main/java/org/apache/fineract/cn/reporting/TestReportingSpecifications.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.junit.Assert;
import org.junit.Test;
import java.util.List;
/**
 * Verifies that each expected report definition is published by the
 * reporting service under its category.
 */
public class TestReportingSpecifications extends AbstractReportingSpecificationTest {

  /**
   * Fetches the report definitions for the given category and asserts that
   * one of them carries the given identifier.
   */
  private void assertReportDefinitionExists(final String category, final String identifier) {
    final List<ReportDefinition> reportDefinitions = super.testSubject.fetchReportDefinitions(category);
    Assert.assertTrue(
        reportDefinitions.stream()
            .anyMatch(reportDefinition -> reportDefinition.getIdentifier().equals(identifier))
    );
  }

  @Test
  public void shouldReturnBalanceSheetReportDefinition() {
    assertReportDefinitionExists("Accounting", "Balancesheet");
  }

  @Test
  public void shouldReturnCustomerListReportDefinition() {
    assertReportDefinitionExists("Customer", "Listing");
  }

  @Test
  public void shouldReturnDepositListReportDefinition() {
    assertReportDefinitionExists("Deposit", "Listing");
  }

  @Test
  public void shouldReturnIncomeStatementReportDefinition() {
    assertReportDefinitionExists("Accounting", "Incomestatement");
  }

  @Test
  public void shouldReturnLoanListReportDefinition() {
    assertReportDefinitionExists("Loan", "Listing");
  }

  @Test
  public void shouldReturnTellerListReportDefinition() {
    assertReportDefinitionExists("Teller", "Listing");
  }

  @Test
  public void shouldReturnTellerTransactionReportDefinition() {
    assertReportDefinitionExists("Teller", "Transactions");
  }
}
| 6,068 |
0 | Create_ds/fineract-cn-reporting/component-test/src/main/java/org/apache/fineract/cn | Create_ds/fineract-cn-reporting/component-test/src/main/java/org/apache/fineract/cn/reporting/SuiteTestEnvironment.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting;
import org.apache.fineract.cn.test.env.TestEnvironment;
import org.apache.fineract.cn.test.fixture.TenantDataStoreContextTestRule;
import org.apache.fineract.cn.test.fixture.cassandra.CassandraInitializer;
import org.apache.fineract.cn.test.fixture.postgresql.PostgreSQLInitializer;
import org.junit.ClassRule;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
public class SuiteTestEnvironment {
static final String APP_NAME = "reporting-v1";
static final String TEST_USER = "shu";
public final static TestEnvironment testEnvironment = new TestEnvironment(APP_NAME);
private final static CassandraInitializer cassandraInitializer = new CassandraInitializer();
private final static PostgreSQLInitializer postgreSQLInitializer = new PostgreSQLInitializer();
final static TenantDataStoreContextTestRule tenantDataStoreContext = TenantDataStoreContextTestRule.forRandomTenantName(cassandraInitializer, postgreSQLInitializer);
@ClassRule
public static TestRule orderClassRules = RuleChain
.outerRule(testEnvironment)
.around(cassandraInitializer)
.around(postgreSQLInitializer)
.around(tenantDataStoreContext);
}
| 6,069 |
0 | Create_ds/fineract-cn-reporting/component-test/src/main/java/org/apache/fineract/cn | Create_ds/fineract-cn-reporting/component-test/src/main/java/org/apache/fineract/cn/reporting/TestSuite.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/**
 * JUnit suite aggregating the reporting component tests so they share the
 * statically initialized test environment (class rules run once per suite).
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({
    TestReportingSpecifications.class
})
public class TestSuite {
}
| 6,070 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/PermittableGroupIds.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1;
/**
 * Permittable group identifiers for the reporting service, used by identity
 * and access management to grant permissions on its endpoints.
 */
@SuppressWarnings("unused")
public interface PermittableGroupIds {
  // Group covering the general report-management endpoints of reporting v1.
  String REPORT_MANAGEMENT = "reporting__v1__general";
}
| 6,071 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/EventConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1;
/**
 * Messaging constants for reporting service events.
 */
@SuppressWarnings("unused")
public interface EventConstants {
  // Destination (topic/queue) name for reporting v1 events.
  String DESTINATION = "reporting-v1";
  // Message property used to select events by operation.
  String SELECTOR_NAME = "operation";
  String INITIALIZE = "initialize";
  // Selector expression matching "initialize" events.
  String SELECTOR_INITIALIZE = SELECTOR_NAME + " = '" + INITIALIZE + "'";
}
| 6,072 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/Type.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
/**
 * Data type of a report field value (see {@code DisplayableField}).
 */
public enum Type {
  TEXT,
  NUMBER,
  DATE
}
| 6,073 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/AutoCompleteResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
import java.util.List;
/**
 * Transport object carrying auto-complete suggestions: the path they were
 * requested for and the matching terms.
 */
public class AutoCompleteResource {

  private String path;
  private List<String> terms;

  public AutoCompleteResource() {
    super();
  }

  /** @return the path the suggestions apply to */
  public String getPath() {
    return path;
  }

  /** @param path the path the suggestions apply to */
  public void setPath(final String path) {
    this.path = path;
  }

  /** @return the suggested terms */
  public List<String> getTerms() {
    return terms;
  }

  /** @param terms the suggested terms */
  public void setTerms(final List<String> terms) {
    this.terms = terms;
  }
}
| 6,074 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/Row.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
import java.util.List;
/**
 * One result row of a generated report: an ordered list of cell values.
 */
public class Row {

  private List<Value> values;

  public Row() {
    super();
  }

  /** @return the cell values of this row */
  public List<Value> getValues() {
    return values;
  }

  /** @param values the cell values of this row */
  public void setValues(final List<Value> values) {
    this.values = values;
  }
}
| 6,075 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/ReportRequest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
import java.util.List;
/**
 * Request payload for generating a report: the query parameters to filter by
 * and the fields the caller wants displayed.
 */
public class ReportRequest {

  private List<QueryParameter> queryParameters;
  private List<DisplayableField> displayableFields;

  public ReportRequest() {
    super();
  }

  /** @return the filter parameters for the report query */
  public List<QueryParameter> getQueryParameters() {
    return queryParameters;
  }

  /** @param queryParameters the filter parameters for the report query */
  public void setQueryParameters(final List<QueryParameter> queryParameters) {
    this.queryParameters = queryParameters;
  }

  /** @return the fields to include in the report output */
  public List<DisplayableField> getDisplayableFields() {
    return displayableFields;
  }

  /** @param displayableFields the fields to include in the report output */
  public void setDisplayableFields(final List<DisplayableField> displayableFields) {
    this.displayableFields = displayableFields;
  }
}
| 6,076 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/ReportDefinition.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
import java.util.List;
/**
 * Describes an available report: its identifier, display name, description,
 * the query parameters it accepts, and the fields it can display.
 */
public class ReportDefinition {

  private String identifier;
  private String name;
  private String description;
  private List<QueryParameter> queryParameters;
  private List<DisplayableField> displayableFields;

  public ReportDefinition() {
    super();
  }

  /** @return the unique identifier of this report */
  public String getIdentifier() {
    return identifier;
  }

  /** @param identifier the unique identifier of this report */
  public void setIdentifier(final String identifier) {
    this.identifier = identifier;
  }

  /** @return the human-readable report name */
  public String getName() {
    return name;
  }

  /** @param name the human-readable report name */
  public void setName(final String name) {
    this.name = name;
  }

  /** @return a description of what the report contains */
  public String getDescription() {
    return description;
  }

  /** @param description a description of what the report contains */
  public void setDescription(final String description) {
    this.description = description;
  }

  /** @return the query parameters this report accepts */
  public List<QueryParameter> getQueryParameters() {
    return queryParameters;
  }

  /** @param queryParameters the query parameters this report accepts */
  public void setQueryParameters(final List<QueryParameter> queryParameters) {
    this.queryParameters = queryParameters;
  }

  /** @return the fields this report can display */
  public List<DisplayableField> getDisplayableFields() {
    return displayableFields;
  }

  /** @param displayableFields the fields this report can display */
  public void setDisplayableFields(final List<DisplayableField> displayableFields) {
    this.displayableFields = displayableFields;
  }
}
| 6,077 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/DisplayableField.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
/**
 * A single output field of a report: its name, its value {@code Type}, and
 * whether it must always be included in the report output.
 */
public class DisplayableField {

  private String name;
  private Type type;
  private Boolean mandatory;

  public DisplayableField() {
  }

  public String getName() {
    return name;
  }

  public void setName(final String name) {
    this.name = name;
  }

  public Type getType() {
    return type;
  }

  public void setType(final Type type) {
    this.type = type;
  }

  public Boolean getMandatory() {
    return mandatory;
  }

  public void setMandatory(final Boolean mandatory) {
    this.mandatory = mandatory;
  }
}
| 6,078 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/Footer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
import java.util.List;
/**
 * Footer section of a generated report page, carrying summary values.
 */
public class Footer {

  private List<Value> values;

  public Footer() {
  }

  public List<Value> getValues() {
    return values;
  }

  public void setValues(final List<Value> values) {
    this.values = values;
  }
}
| 6,079 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/Value.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
/**
 * A typed value appearing in a report row or footer: one or more string
 * representations plus the {@code Type} they should be interpreted as.
 */
public class Value {

  private String[] values;
  private Type type;

  public Value() {
  }

  // NOTE(review): the internal array is returned and stored without copying,
  // so callers share mutable state with this object — confirm that is intended.
  public String[] getValues() {
    return values;
  }

  public void setValues(final String[] values) {
    this.values = values;
  }

  public Type getType() {
    return type;
  }

  public void setType(final Type type) {
    this.type = type;
  }
}
| 6,080 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/ReportPage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
import java.util.List;
/**
 * One page of a generated report: generation metadata (who/when), a header
 * with column names, the data rows, an optional footer, and a flag telling
 * the caller whether more pages are available.
 */
public class ReportPage {

  private String name;
  private String description;
  private String generatedOn;
  private String generatedBy;
  private Header header;
  private List<Row> rows;
  private Footer footer;
  private boolean hasMore;

  public ReportPage() {
  }

  public String getName() {
    return name;
  }

  public void setName(final String name) {
    this.name = name;
  }

  public String getDescription() {
    return description;
  }

  public void setDescription(final String description) {
    this.description = description;
  }

  public String getGeneratedOn() {
    return generatedOn;
  }

  public void setGeneratedOn(final String generatedOn) {
    this.generatedOn = generatedOn;
  }

  public String getGeneratedBy() {
    return generatedBy;
  }

  public void setGeneratedBy(final String generatedBy) {
    this.generatedBy = generatedBy;
  }

  public Header getHeader() {
    return header;
  }

  public void setHeader(final Header header) {
    this.header = header;
  }

  public List<Row> getRows() {
    return rows;
  }

  public void setRows(final List<Row> rows) {
    this.rows = rows;
  }

  public Footer getFooter() {
    return footer;
  }

  public void setFooter(final Footer footer) {
    this.footer = footer;
  }

  public boolean isHasMore() {
    return hasMore;
  }

  public void setHasMore(final boolean hasMore) {
    this.hasMore = hasMore;
  }
}
| 6,081 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/Header.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
import java.util.List;
/**
 * Header of a report page: the ordered list of column names.
 */
public class Header {

  private List<String> columnNames;

  public Header() {
  }

  public List<String> getColumnNames() {
    return columnNames;
  }

  public void setColumnNames(final List<String> columnNames) {
    this.columnNames = columnNames;
  }
}
| 6,082 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/domain/QueryParameter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.domain;
/**
 * A filter parameter for a report query: a name, value type, comparison
 * operator, the supplied value, whether the parameter is required, and an
 * optional auto-complete resource hint for user interfaces.
 */
public class QueryParameter {

  /** Comparison operators supported when matching a parameter value. */
  public enum Operator {
    EQUALS,
    IN,
    LIKE,
    BETWEEN,
    GREATER,
    LESSER
  }

  private String name;
  private Type type;
  private Operator operator;
  private String value;
  private Boolean mandatory;
  private AutoCompleteResource autoCompleteResource;

  public QueryParameter() {
  }

  public String getName() {
    return name;
  }

  public void setName(final String name) {
    this.name = name;
  }

  public Type getType() {
    return type;
  }

  public void setType(final Type type) {
    this.type = type;
  }

  public Operator getOperator() {
    return operator;
  }

  public void setOperator(final Operator operator) {
    this.operator = operator;
  }

  public String getValue() {
    return value;
  }

  public void setValue(final String value) {
    this.value = value;
  }

  public Boolean getMandatory() {
    return mandatory;
  }

  public void setMandatory(final Boolean mandatory) {
    this.mandatory = mandatory;
  }

  public AutoCompleteResource getAutoCompleteResource() {
    return autoCompleteResource;
  }

  public void setAutoCompleteResource(final AutoCompleteResource autoCompleteResource) {
    this.autoCompleteResource = autoCompleteResource;
  }
}
| 6,083 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/client/ReportManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.client;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import java.util.List;
import org.apache.fineract.cn.api.annotation.ThrowsException;
import org.apache.fineract.cn.api.annotation.ThrowsExceptions;
import org.apache.fineract.cn.api.util.CustomFeignClientsConfiguration;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
/**
 * Feign client for the reporting service REST API (base path {@code /reporting/v1}).
 * Error-to-exception mappings are declared per method via {@code @ThrowsExceptions}.
 */
@SuppressWarnings("unused")
@FeignClient(path="/reporting/v1", url = "http://${kubernetes.reporting.service.name}:${kubernetes.reporting.server.port}", configuration = CustomFeignClientsConfiguration.class)
public interface ReportManager {
/**
 * Returns the names of all available report categories.
 */
@RequestMapping(
value = "/categories",
method = RequestMethod.GET,
produces = MediaType.ALL_VALUE,
consumes = MediaType.APPLICATION_JSON_VALUE
)
List<String> fetchCategories();
/**
 * Returns the report definitions registered under the given category.
 *
 * @param category the report category to look up
 * @throws ReportNotFoundException if the service responds with HTTP 404
 */
@RequestMapping(
value = "/categories/{category}",
method = RequestMethod.GET,
produces = MediaType.ALL_VALUE,
consumes = MediaType.APPLICATION_JSON_VALUE)
@ThrowsExceptions({
@ThrowsException(status = HttpStatus.NOT_FOUND, exception = ReportNotFoundException.class),
})
List<ReportDefinition> fetchReportDefinitions(@PathVariable("category") final String category);
/**
 * Generates one page of the identified report using the given request.
 *
 * @param category   the category the report belongs to
 * @param identifier the report identifier within the category
 * @param reportRequest query parameters and displayable fields to use
 * @param pageIndex optional zero-based page index
 * @param size      optional page size
 * @throws ReportNotFoundException            if the service responds with HTTP 404
 * @throws ReportParameterValidationException if the service responds with HTTP 400
 */
@RequestMapping(
value = "/categories/{category}/reports/{identifier}",
method = RequestMethod.POST,
produces = MediaType.APPLICATION_JSON_VALUE,
consumes = MediaType.APPLICATION_JSON_VALUE
)
@ThrowsExceptions({
@ThrowsException(status = HttpStatus.NOT_FOUND, exception = ReportNotFoundException.class),
@ThrowsException(status = HttpStatus.BAD_REQUEST, exception = ReportParameterValidationException.class)
})
ReportPage generateReport(@PathVariable("category") final String category,
@PathVariable("identifier") final String identifier,
@RequestBody final ReportRequest reportRequest,
@RequestParam(value = "pageIndex", required = false) final Integer pageIndex,
@RequestParam(value = "size", required = false) final Integer size);
/**
 * Returns the definition of a single report.
 * NOTE(review): this path has no leading slash, unlike the other mappings —
 * presumably resolved the same way by Spring, but confirm intentional.
 *
 * @param category   the category the report belongs to
 * @param identifier the report identifier within the category
 * @throws ReportNotFoundException if the service responds with HTTP 404
 */
@RequestMapping(
value = "categories/{category}/definitions/{identifier}",
method = RequestMethod.GET,
produces = MediaType.ALL_VALUE,
consumes = MediaType.APPLICATION_JSON_VALUE)
@ThrowsExceptions({
@ThrowsException(status = HttpStatus.NOT_FOUND, exception = ReportNotFoundException.class)
})
ReportDefinition findReportDefinition(@PathVariable("category") final String category,
@PathVariable("identifier") final String identifier);
}
| 6,084 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/client/ReportNotFoundException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.client;
/**
 * Thrown by the reporting client when the service responds with HTTP 404,
 * i.e. the requested report category, definition, or report does not exist.
 */
public class ReportNotFoundException extends RuntimeException {
  // RuntimeException is Serializable; pin the serial form explicitly so it
  // does not change with unrelated compiler/class changes.
  private static final long serialVersionUID = 1L;
}
| 6,085 |
0 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1 | Create_ds/fineract-cn-reporting/api/src/main/java/org/apache/fineract/cn/reporting/api/v1/client/ReportParameterValidationException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.api.v1.client;
/**
 * Thrown by the reporting client when the service responds with HTTP 400,
 * i.e. the supplied report request parameters failed validation.
 */
public class ReportParameterValidationException extends RuntimeException {
  // RuntimeException is Serializable; pin the serial form explicitly so it
  // does not change with unrelated compiler/class changes.
  private static final long serialVersionUID = 1L;
}
| 6,086 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/ReportingApplication.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service;
import org.springframework.boot.SpringApplication;
/**
 * Entry point that boots the reporting service using the Spring context
 * defined by {@link ReportingConfiguration}.
 */
public class ReportingApplication {

  public ReportingApplication() {
  }

  /**
   * Launches the Spring Boot application.
   *
   * @param args command line arguments forwarded to Spring Boot
   */
  public static void main(final String[] args) {
    SpringApplication.run(ReportingConfiguration.class, args);
  }
}
| 6,087 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/ReportingConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service;
import org.apache.fineract.cn.anubis.config.EnableAnubis;
import org.apache.fineract.cn.cassandra.config.EnableCassandra;
import org.apache.fineract.cn.command.config.EnableCommandProcessing;
import org.apache.fineract.cn.lang.config.EnableApplicationName;
import org.apache.fineract.cn.lang.config.EnableServiceException;
import org.apache.fineract.cn.lang.config.EnableTenantContext;
import org.apache.fineract.cn.postgresql.config.EnablePostgreSQL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.web.servlet.config.annotation.PathMatchConfigurer;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
/**
 * Spring configuration for the reporting service: enables auto-configuration,
 * tenant context, Cassandra and PostgreSQL support, Anubis security, and
 * command processing; scans the service's REST and internal packages and the
 * JPA repository package.
 */
@SuppressWarnings("WeakerAccess")
@Configuration
@EnableAutoConfiguration
@EnableApplicationName
@EnableCommandProcessing
@EnableTenantContext
@EnableCassandra
@EnablePostgreSQL
@EnableAnubis
@EnableServiceException
@EnableJpaRepositories(basePackages = {
"org.apache.fineract.cn.reporting.service.internal.repository"})
@ComponentScan({
"org.apache.fineract.cn.reporting.service.rest",
"org.apache.fineract.cn.reporting.service.internal"
})
public class ReportingConfiguration extends WebMvcConfigurerAdapter {
public ReportingConfiguration() {
super();
}
/**
 * Exposes the service-wide SLF4J logger as a named bean so components can
 * inject it via {@code @Qualifier(ServiceConstants.LOGGER_NAME)}.
 */
@Bean(name = ServiceConstants.LOGGER_NAME)
public Logger logger() {
return LoggerFactory.getLogger(ServiceConstants.LOGGER_NAME);
}
/**
 * Disables suffix pattern matching so request paths containing dots are not
 * truncated at the last dot when matched against handler mappings.
 */
@Override
public void configurePathMatch(final PathMatchConfigurer configurer) {
configurer.setUseSuffixPatternMatch(Boolean.FALSE);
}
}
| 6,088 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/ServiceConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service;
/**
 * Service-wide constants for the reporting service.
 */
public interface ServiceConstants {
// Name of the shared SLF4J logger bean; components inject it via @Qualifier.
String LOGGER_NAME = "reporting-logger";
}
| 6,089 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal/repository/DummyEntity.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.repository;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
 * Minimal JPA entity holding only an identifier.
 * NOTE(review): the name suggests it exists only so the persistence unit has
 * at least one managed type — confirm before removing.
 */
@Entity
public class DummyEntity {

  @Id
  private Long id;

  public DummyEntity() {
  }

  public Long getId() {
    return this.id;
  }

  public void setId(final Long id) {
    this.id = id;
  }
}
| 6,090 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal/repository/DummyRepository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.repository;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
// Spring Data JPA repository for DummyEntity (CRUD via JpaRepository).
// NOTE(review): appears to exist only so repository scanning finds at least
// one repository in the configured base package — confirm before removing.
@Repository
public interface DummyRepository extends JpaRepository<DummyEntity, Long> {
}
| 6,091 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal/provider/ReportSpecificationProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.provider;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import org.slf4j.Logger;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@Component
@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON)
public class ReportSpecificationProvider implements ApplicationContextAware {

  private final Logger logger;
  // Specification lookup keyed by "category~identifier"; filled once in initialize().
  private final HashMap<String, ReportSpecification> reportSpecificationCache = new HashMap<>();
  // Report definitions grouped by category; filled once in initialize().
  // NOTE(review): both caches are plain HashMaps populated during context
  // setup and read afterwards — confirm no concurrent writes are possible.
  private final HashMap<String, List<ReportDefinition>> reportCategoryCache = new HashMap<>();
  private ApplicationContext applicationContext;

  @Autowired
  public ReportSpecificationProvider(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger) {
    super();
    this.logger = logger;
  }

  /**
   * Captures the application context and eagerly builds both caches from all
   * beans annotated with {@code @Report}.
   */
  @Override
  public void setApplicationContext(final ApplicationContext applicationContext) throws BeansException {
    this.applicationContext = applicationContext;
    this.initialize();
  }

  /** Returns the categories for which at least one report specification is registered. */
  public List<String> getAvailableCategories() {
    return new ArrayList<>(this.reportCategoryCache.keySet());
  }

  /**
   * Returns the report definitions registered under the given category, or an
   * empty list when the category is unknown. A defensive copy is returned so
   * callers cannot mutate the internal cache.
   */
  public List<ReportDefinition> getAvailableReports(final String category) {
    this.logger.debug("Looking up report definitions for category {}.", category);
    return new ArrayList<>(this.reportCategoryCache.getOrDefault(category, Collections.emptyList()));
  }

  /** Looks up the specification registered for the given category and identifier. */
  public Optional<ReportSpecification> getReportSpecification(final String category, final String identifier) {
    final String keyForReportSpecificationCache = this.buildKeyForSpecificationCache(category, identifier);
    this.logger.debug("Looking up report specification for {}.", keyForReportSpecificationCache);
    return Optional.ofNullable(this.reportSpecificationCache.get(keyForReportSpecificationCache));
  }

  // Discovers every @Report bean in the context and registers its definition
  // and specification in the caches.
  private void initialize() {
    final Map<String, Object> beansWithAnnotation = this.applicationContext.getBeansWithAnnotation(Report.class);
    beansWithAnnotation.values().forEach(bean -> {
      final ReportSpecification reportSpecification = (ReportSpecification) bean;
      final Report report = reportSpecification.getClass().getAnnotation(Report.class);
      final String keyForReportSpecificationCache =
          this.buildKeyForSpecificationCache(report.category(), report.identifier());
      this.logger.debug("Adding report specification for {}", keyForReportSpecificationCache);
      // computeIfAbsent returns the (possibly new) list, so the previous
      // computeIfAbsent-then-get double lookup is unnecessary.
      this.reportCategoryCache
          .computeIfAbsent(report.category(), unused -> new ArrayList<>())
          .add(reportSpecification.getReportDefinition());
      this.reportSpecificationCache.put(keyForReportSpecificationCache, reportSpecification);
    });
  }

  // Cache key format shared by initialize() and getReportSpecification().
  private String buildKeyForSpecificationCache(final String category, final String identifier) {
    return category + "~" + identifier;
  }

  /** Finds a single report definition by category and identifier, if registered. */
  public Optional<ReportDefinition> findReportDefinition(final String category, final String identifier) {
    final List<ReportDefinition> reportDefinitions = this.reportCategoryCache.get(category);
    if (reportDefinitions == null) {
      return Optional.empty();
    }
    return reportDefinitions
        .stream()
        .filter(reportDefinition -> reportDefinition.getIdentifier().equals(identifier))
        .findAny();
  }
}
| 6,092 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal/specification/IncomeStatementReportSpecification.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.specification;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Header;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.api.v1.domain.Row;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import org.apache.fineract.cn.reporting.api.v1.domain.Value;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.DisplayableFieldBuilder;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.math.BigDecimal;
import java.time.Clock;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.lang.DateConverter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Qualifier;
@Report(category = "Accounting", identifier = "Incomestatement")
public class IncomeStatementReportSpecification implements ReportSpecification {

  private static final String DATE_RANGE = "Date range";
  private static final String TYPE = "Type";
  private static final String IDENTIFIER = "Identifier";
  private static final String NAME = "Name";
  private static final String BALANCE = "Balance";

  private final Logger logger;
  private final EntityManager entityManager;
  // Display-field name -> backing column of thoth_accounts.
  private final HashMap<String, String> accountColumnMapping = new HashMap<>();
  // Union of all column mappings; consulted by validate(..).
  private final HashMap<String, String> allColumnMapping = new HashMap<>();

  public IncomeStatementReportSpecification(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger,
                                            final EntityManager entityManager) {
    super();
    this.logger = logger;
    this.entityManager = entityManager;
    this.initializeMapping();
  }

  /**
   * Returns the static definition of the income statement report: identifier,
   * display name, query parameters and displayable fields.
   */
  @Override
  public ReportDefinition getReportDefinition() {
    final ReportDefinition reportDefinition = new ReportDefinition();
    reportDefinition.setIdentifier("Incomestatement");
    reportDefinition.setName("Income Statement");
    reportDefinition.setDescription("Income statement report");
    reportDefinition.setQueryParameters(this.buildQueryParameters());
    reportDefinition.setDisplayableFields(this.buildDisplayableFields());
    return reportDefinition;
  }

  /**
   * Generates the income statement: revenue account rows followed by a
   * "TOTAL REVENUES" row, expense account rows followed by a "TOTAL EXPENSES"
   * row, and a closing "NET INCOME" row (revenues minus expenses).
   *
   * NOTE(review): buildAccountQuery ignores pageIndex/size (no LIMIT/OFFSET is
   * applied), so the hasMore probe below re-runs the identical query and reports
   * true whenever any revenue rows exist at all — confirm whether pagination
   * was intended here, as it is in the other report specifications.
   */
  @Override
  public ReportPage generateReport(final ReportRequest reportRequest, final int pageIndex, final int size) {
    final ReportDefinition reportDefinition = this.getReportDefinition();
    // SLF4J placeholders are "{}"; the former "{0}" (MessageFormat style) was logged literally.
    this.logger.info("Generating report {}.", reportDefinition.getIdentifier());
    final ReportPage reportPage = new ReportPage();
    reportPage.setName(reportDefinition.getName());
    reportPage.setDescription(reportDefinition.getDescription());
    reportPage.setHeader(this.createHeader(reportRequest.getDisplayableFields()));
    final Query accountQuery = this.entityManager.createNativeQuery(this.buildAccountQuery(reportRequest, pageIndex, size));
    final List<?> accountResultList = accountQuery.getResultList();
    reportPage.setRows(this.buildRows(reportRequest, accountResultList));
    reportPage.setHasMore(
        !this.entityManager.createNativeQuery(this.buildAccountQuery(reportRequest, pageIndex + 1, size))
            .getResultList().isEmpty()
    );
    reportPage.setGeneratedBy(UserContextHolder.checkedGetUser());
    reportPage.setGeneratedOn(DateConverter.toIsoString(LocalDateTime.now(Clock.systemUTC())));
    return reportPage;
  }

  /**
   * Rejects requests that reference query parameters or displayable fields this
   * report does not know about.
   *
   * @throws IllegalArgumentException listing every unknown field name
   */
  @Override
  public void validate(final ReportRequest reportRequest) throws IllegalArgumentException {
    final ArrayList<String> unknownFields = new ArrayList<>();
    reportRequest.getQueryParameters().forEach(queryParameter -> {
      if (!this.allColumnMapping.containsKey(queryParameter.getName())) {
        unknownFields.add(queryParameter.getName());
      }
    });
    reportRequest.getDisplayableFields().forEach(displayableField -> {
      if (!this.allColumnMapping.containsKey(displayableField.getName())) {
        unknownFields.add(displayableField.getName());
      }
    });
    if (!unknownFields.isEmpty()) {
      throw new IllegalArgumentException(
          "Unspecified fields requested: " + String.join(", ", unknownFields)
      );
    }
  }

  private void initializeMapping() {
    this.accountColumnMapping.put(DATE_RANGE, "acc.created_on");
    this.accountColumnMapping.put(TYPE, "acc.a_type");
    this.accountColumnMapping.put(IDENTIFIER, "acc.identifier");
    this.accountColumnMapping.put(NAME, "acc.a_name");
    this.accountColumnMapping.put(BALANCE, "acc.balance");
    this.allColumnMapping.putAll(accountColumnMapping);
  }

  private Header createHeader(final List<DisplayableField> displayableFields) {
    final Header header = new Header();
    header.setColumnNames(
        displayableFields
            .stream()
            .map(DisplayableField::getName)
            .collect(Collectors.toList())
    );
    return header;
  }

  /**
   * Builds all result rows for the report.
   *
   * The subtotals read the balance from result column index 3, which assumes the
   * four mandatory displayable fields (Type, Identifier, Name, Balance) are
   * requested in that order — NOTE(review): confirm callers cannot reorder or
   * omit fields, otherwise index 3 is not the balance.
   */
  private List<Row> buildRows(final ReportRequest reportRequest, final List<?> accountResultList) {
    final ArrayList<Row> rows = new ArrayList<>();

    final BigDecimal[] revenueSubTotal = {new BigDecimal("0.000")};
    accountResultList.forEach(result -> rows.add(this.toAccountRow(result, revenueSubTotal)));
    rows.add(this.totalRow("TOTAL REVENUES ", revenueSubTotal[0]));

    final Query expenseQuery = this.entityManager.createNativeQuery(this.buildExpenseQuery(reportRequest));
    final List<?> expenseResultList = expenseQuery.getResultList();
    final BigDecimal[] expenseSubTotal = {new BigDecimal("0.000")};
    expenseResultList.forEach(result -> rows.add(this.toAccountRow(result, expenseSubTotal)));
    rows.add(this.totalRow("TOTAL EXPENSES ", expenseSubTotal[0]));

    rows.add(this.totalRow("NET INCOME ", revenueSubTotal[0].subtract(expenseSubTotal[0])));
    return rows;
  }

  /**
   * Converts one native-query result into a report row and accumulates the row's
   * balance (column index 3) into the given one-element subtotal accumulator.
   *
   * Fix: the balance was previously added inside the per-column loop, so every
   * account's balance was counted once per selected column (i.e. 4x) in the
   * revenue and expense subtotals, inflating them and the net income.
   */
  private Row toAccountRow(final Object result, final BigDecimal[] subTotal) {
    final Row row = new Row();
    row.setValues(new ArrayList<>());
    if (result instanceof Object[]) {
      final Object[] resultValues = (Object[]) result;
      for (final Object resultValue : resultValues) {
        final Value value = new Value();
        if (resultValue != null) {
          value.setValues(new String[]{resultValue.toString()});
        } else {
          value.setValues(new String[]{});
        }
        row.getValues().add(value);
      }
      // Add the balance exactly once per account row; guard against null balances.
      if (resultValues.length > 3 && resultValues[3] != null) {
        subTotal[0] = subTotal[0].add((BigDecimal) resultValues[3]);
      }
    } else {
      final Value value = new Value();
      value.setValues(new String[]{result.toString()});
      row.getValues().add(value);
    }
    return row;
  }

  /** Creates a single-value summary row, e.g. "TOTAL REVENUES 123.456". */
  private Row totalRow(final String label, final BigDecimal amount) {
    final Row row = new Row();
    row.setValues(new ArrayList<>());
    final Value value = new Value();
    value.setValues(new String[]{label + amount});
    row.getValues().add(value);
    return row;
  }

  // Selects the requested columns for all REVENUE accounts. pageIndex/size are
  // accepted for interface symmetry but no LIMIT/OFFSET is applied (see generateReport).
  private String buildAccountQuery(final ReportRequest reportRequest, final int pageIndex, final int size) {
    return this.buildTypedAccountQuery(reportRequest, "REVENUE");
  }

  // Selects the requested columns for all EXPENSE accounts.
  private String buildExpenseQuery(final ReportRequest reportRequest) {
    return this.buildTypedAccountQuery(reportRequest, "EXPENSE");
  }

  // Shared query builder; the two callers differed only in the account type literal.
  private String buildTypedAccountQuery(final ReportRequest reportRequest, final String accountType) {
    final ArrayList<String> columns = new ArrayList<>();
    reportRequest.getDisplayableFields().forEach(displayableField -> {
      final String column = this.accountColumnMapping.get(displayableField.getName());
      if (column != null) {
        columns.add(column);
      }
    });
    return "SELECT " + String.join(", ", columns)
        + " FROM thoth_accounts acc"
        + " WHERE acc.a_type = '" + accountType + "'"
        + " ORDER BY acc.identifier";
  }

  private List<DisplayableField> buildDisplayableFields() {
    return Arrays.asList(
        DisplayableFieldBuilder.create(TYPE, Type.TEXT).mandatory().build(),
        DisplayableFieldBuilder.create(IDENTIFIER, Type.TEXT).mandatory().build(),
        DisplayableFieldBuilder.create(NAME, Type.TEXT).mandatory().build(),
        DisplayableFieldBuilder.create(BALANCE, Type.TEXT).mandatory().build()
    );
  }

  // This report takes no query parameters.
  private List<QueryParameter> buildQueryParameters() {
    return Arrays.asList();
  }
}
| 6,093 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal/specification/TellerListReportSpecification.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.specification;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Header;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.api.v1.domain.Row;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import org.apache.fineract.cn.reporting.api.v1.domain.Value;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.CriteriaBuilder;
import org.apache.fineract.cn.reporting.service.spi.DisplayableFieldBuilder;
import org.apache.fineract.cn.reporting.service.spi.QueryParameterBuilder;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.time.Clock;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.lang.DateConverter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@Report(category = "Teller", identifier = "Listing")
public class TellerListReportSpecification implements ReportSpecification {

  private static final String TELLER = "Teller";
  private static final String EMPLOYEE = "Employee";
  private static final String OFFICE = "Office";
  private static final String CASHDRAW_LIMIT = "Cashdraw limit";
  private static final String STATE = "State";
  private static final String DATE_RANGE = "Date";

  private final Logger logger;
  private final EntityManager entityManager;
  // Display-field name -> backing column of tajet_teller.
  private final HashMap<String, String> tellerColumnMapping = new HashMap<>();
  // Union of all column mappings; consulted by validate(..).
  private final HashMap<String, String> allColumnMapping = new HashMap<>();

  @Autowired
  public TellerListReportSpecification(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger,
                                       final EntityManager entityManager) {
    super();
    this.logger = logger;
    this.entityManager = entityManager;
    this.initializeMapping();
  }

  /**
   * Returns the static definition of the teller listing report: identifier,
   * display name, query parameters and displayable fields.
   */
  @Override
  public ReportDefinition getReportDefinition() {
    final ReportDefinition reportDefinition = new ReportDefinition();
    reportDefinition.setIdentifier("Listing");
    reportDefinition.setName("Teller Listing");
    reportDefinition.setDescription("List of all Tellers.");
    reportDefinition.setQueryParameters(this.buildQueryParameters());
    reportDefinition.setDisplayableFields(this.buildDisplayableFields());
    return reportDefinition;
  }

  /**
   * Generates one page of the teller listing; a probe query for page
   * {@code pageIndex + 1} determines {@code hasMore}.
   */
  @Override
  public ReportPage generateReport(final ReportRequest reportRequest, final int pageIndex, final int size) {
    final ReportDefinition reportDefinition = this.getReportDefinition();
    // SLF4J placeholders are "{}"; the former "{0}" (MessageFormat style) was logged literally.
    this.logger.info("Generating report {}.", reportDefinition.getIdentifier());
    final ReportPage reportPage = new ReportPage();
    reportPage.setName(reportDefinition.getName());
    reportPage.setDescription(reportDefinition.getDescription());
    reportPage.setHeader(this.createHeader(reportRequest.getDisplayableFields()));
    final Query tellerQuery = this.entityManager.createNativeQuery(this.buildTellerQuery(reportRequest, pageIndex, size));
    final List<?> tellerResultList = tellerQuery.getResultList();
    reportPage.setRows(this.buildRows(tellerResultList));
    reportPage.setHasMore(
        !this.entityManager.createNativeQuery(this.buildTellerQuery(reportRequest, pageIndex + 1, size))
            .getResultList().isEmpty()
    );
    reportPage.setGeneratedBy(UserContextHolder.checkedGetUser());
    reportPage.setGeneratedOn(DateConverter.toIsoString(LocalDateTime.now(Clock.systemUTC())));
    return reportPage;
  }

  /**
   * Rejects requests that reference query parameters or displayable fields this
   * report does not know about.
   *
   * @throws IllegalArgumentException listing every unknown field name
   */
  @Override
  public void validate(final ReportRequest reportRequest) throws IllegalArgumentException {
    final ArrayList<String> unknownFields = new ArrayList<>();
    reportRequest.getQueryParameters().forEach(queryParameter -> {
      if (!this.allColumnMapping.containsKey(queryParameter.getName())) {
        unknownFields.add(queryParameter.getName());
      }
    });
    reportRequest.getDisplayableFields().forEach(displayableField -> {
      if (!this.allColumnMapping.containsKey(displayableField.getName())) {
        unknownFields.add(displayableField.getName());
      }
    });
    if (!unknownFields.isEmpty()) {
      throw new IllegalArgumentException(
          "Unspecified fields requested: " + String.join(", ", unknownFields)
      );
    }
  }

  private void initializeMapping() {
    this.tellerColumnMapping.put(TELLER, "tl.identifier");
    this.tellerColumnMapping.put(OFFICE, "tl.office_identifier");
    this.tellerColumnMapping.put(CASHDRAW_LIMIT, "tl.cashdraw_limit");
    this.tellerColumnMapping.put(EMPLOYEE, "tl.assigned_employee_identifier");
    this.tellerColumnMapping.put(STATE, "tl.a_state");
    this.tellerColumnMapping.put(DATE_RANGE, "tl.created_on");
    this.allColumnMapping.putAll(tellerColumnMapping);
  }

  private Header createHeader(final List<DisplayableField> displayableFields) {
    final Header header = new Header();
    header.setColumnNames(
        displayableFields
            .stream()
            .map(DisplayableField::getName)
            .collect(Collectors.toList())
    );
    return header;
  }

  // One report row per query result; null columns become empty value arrays.
  private List<Row> buildRows(final List<?> tellerResultList) {
    final ArrayList<Row> rows = new ArrayList<>();
    tellerResultList.forEach(result -> {
      final Row row = new Row();
      row.setValues(new ArrayList<>());
      if (result instanceof Object[]) {
        final Object[] resultValues = (Object[]) result;
        for (final Object resultVal : resultValues) {
          final Value val = new Value();
          if (resultVal != null) {
            val.setValues(new String[]{resultVal.toString()});
          } else {
            val.setValues(new String[]{});
          }
          row.getValues().add(val);
        }
      } else {
        final Value value = new Value();
        value.setValues(new String[]{result.toString()});
        row.getValues().add(value);
      }
      rows.add(row);
    });
    return rows;
  }

  private List<QueryParameter> buildQueryParameters() {
    return Arrays.asList(
        QueryParameterBuilder.create(DATE_RANGE, Type.DATE).operator(QueryParameter.Operator.BETWEEN).build(),
        QueryParameterBuilder.create(STATE, Type.TEXT).operator(QueryParameter.Operator.IN).build()
    );
  }

  private List<DisplayableField> buildDisplayableFields() {
    return Arrays.asList(
        DisplayableFieldBuilder.create(TELLER, Type.TEXT).mandatory().build(),
        DisplayableFieldBuilder.create(OFFICE, Type.TEXT).build(),
        DisplayableFieldBuilder.create(EMPLOYEE, Type.TEXT).build(),
        DisplayableFieldBuilder.create(CASHDRAW_LIMIT, Type.TEXT).build(),
        DisplayableFieldBuilder.create(STATE, Type.TEXT).build()
    );
  }

  // Builds the paginated native query for the teller listing.
  // NOTE(review): query-parameter values are interpolated into raw SQL via
  // CriteriaBuilder.buildCriteria — verify that helper escapes its input, since
  // the values originate from the client's ReportRequest.
  private String buildTellerQuery(final ReportRequest reportRequest, final int pageIndex, final int size) {
    final StringBuilder query = new StringBuilder("SELECT ");
    final ArrayList<String> columns = new ArrayList<>();
    reportRequest.getDisplayableFields().forEach(displayableField -> {
      final String column = this.tellerColumnMapping.get(displayableField.getName());
      if (column != null) {
        columns.add(column);
      }
    });
    query.append(String.join(", ", columns))
        .append(" FROM ")
        .append("tajet_teller tl ");
    final List<QueryParameter> queryParameters = reportRequest.getQueryParameters();
    if (!queryParameters.isEmpty()) {
      final ArrayList<String> criteria = new ArrayList<>();
      queryParameters.forEach(queryParameter -> {
        if (queryParameter.getValue() != null && !queryParameter.getValue().isEmpty()) {
          criteria.add(
              CriteriaBuilder.buildCriteria(this.tellerColumnMapping.get(queryParameter.getName()), queryParameter)
          );
        }
      });
      if (!criteria.isEmpty()) {
        query.append(" WHERE ");
        query.append(String.join(" AND ", criteria));
      }
    }
    query.append(" ORDER BY tl.identifier");
    query.append(" LIMIT ");
    query.append(size);
    if (pageIndex > 0) {
      query.append(" OFFSET ");
      query.append(size * pageIndex);
    }
    return query.toString();
  }
}
| 6,094 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal/specification/DepositListReportSpecification.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.specification;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Header;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.api.v1.domain.Row;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import org.apache.fineract.cn.reporting.api.v1.domain.Value;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.CriteriaBuilder;
import org.apache.fineract.cn.reporting.service.spi.DisplayableFieldBuilder;
import org.apache.fineract.cn.reporting.service.spi.QueryParameterBuilder;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.time.Clock;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.lang.DateConverter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@Report(category = "Deposit", identifier = "Listing")
public class DepositListReportSpecification implements ReportSpecification {
private static final String CUSTOMER = "Customer Account";
private static final String FIRST_NAME = "First Name";
private static final String MIDDLE_NAME = "Middle Name";
private static final String LAST_NAME = "Last Name";
private static final String EMPLOYEE = "Created By";
private static final String ACCOUNT_NUMBER = "Deposit Account";
private static final String PRODUCT = "Product";
private static final String ACCOUNT_TYPE = "Deposit Type";
private static final String STATE = "Status";
private static final String OFFICE = "Office";
private static final String DATE_RANGE = "Date Created";
private final EntityManager entityManager;
private final Logger logger;
private final HashMap<String, String> customerColumnMapping = new HashMap<>();
private final HashMap<String, String> depositAccountColumnMapping = new HashMap<>();
private final HashMap<String, String> depositProductColumnMapping = new HashMap<>();
private final HashMap<String, String> allColumnMapping = new HashMap<>();
@Autowired
public DepositListReportSpecification(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger, final EntityManager entityManager) {
this.entityManager = entityManager;
this.logger = logger;
this.initializeMapping();
}
@Override
public ReportDefinition getReportDefinition() {
final ReportDefinition reportDefinition = new ReportDefinition();
reportDefinition.setIdentifier("Listing");
reportDefinition.setName("Deposit Account Listing");
reportDefinition.setDescription("List of all deposit accounts.");
reportDefinition.setQueryParameters(this.buildQueryParameters());
reportDefinition.setDisplayableFields(this.buildDisplayableFields());
return reportDefinition;
}
@Override
public ReportPage generateReport(ReportRequest reportRequest, int pageIndex, int size) {
final ReportDefinition reportDefinition = this.getReportDefinition();
this.logger.info("Generating report {0} ", reportDefinition.getIdentifier());
final ReportPage reportPage = new ReportPage();
reportPage.setName(reportDefinition.getName());
reportPage.setDescription(reportDefinition.getDescription());
reportPage.setHeader(this.createHeader(reportRequest.getDisplayableFields()));
final Query customerQuery = this.entityManager.createNativeQuery(this.buildCustomerQuery(reportRequest, pageIndex, size));
final List<?> customerResultList = customerQuery.getResultList();
reportPage.setRows(this.buildRows(reportRequest, customerResultList));
reportPage.setHasMore(
!this.entityManager.createNativeQuery(this.buildCustomerQuery(reportRequest, pageIndex + 1, size))
.getResultList().isEmpty()
);
reportPage.setGeneratedBy(UserContextHolder.checkedGetUser());
reportPage.setGeneratedOn(DateConverter.toIsoString(LocalDateTime.now(Clock.systemUTC())));
return reportPage;
}
@Override
public void validate(ReportRequest reportRequest) throws IllegalArgumentException {
final ArrayList<String> unknownFields = new ArrayList<>();
reportRequest.getQueryParameters().forEach(queryParameter -> {
if (!this.allColumnMapping.keySet().contains(queryParameter.getName())) {
unknownFields.add(queryParameter.getName());
}
});
reportRequest.getDisplayableFields().forEach(displayableField -> {
if (!this.allColumnMapping.keySet().contains(displayableField.getName())) {
unknownFields.add(displayableField.getName());
}
});
if (!unknownFields.isEmpty()) {
throw new IllegalArgumentException(
"Unspecified fields requested: " + unknownFields.stream().collect(Collectors.joining(", "))
);
}
}
private void initializeMapping() {
this.customerColumnMapping.put(CUSTOMER, "cst.identifier");
this.customerColumnMapping.put(FIRST_NAME, "cst.given_name");
this.customerColumnMapping.put(MIDDLE_NAME, "cst.middle_name");
this.customerColumnMapping.put(LAST_NAME, "cst.surname");
this.customerColumnMapping.put(OFFICE, "cst.assigned_office");
this.depositAccountColumnMapping.put(EMPLOYEE, "pi.created_by");
this.depositAccountColumnMapping.put(ACCOUNT_NUMBER, "pi.account_identifier");
this.depositAccountColumnMapping.put(STATE, "pi.a_state");
this.depositAccountColumnMapping.put(PRODUCT, "pi.product_definition_id");
this.depositAccountColumnMapping.put(DATE_RANGE, "pi.created_on");
this.depositProductColumnMapping.put(ACCOUNT_TYPE, "pd.a_name, pd.a_type");
this.allColumnMapping.putAll(customerColumnMapping);
this.allColumnMapping.putAll(depositProductColumnMapping);
this.allColumnMapping.putAll(depositAccountColumnMapping);
}
private Header createHeader(final List<DisplayableField> displayableFields) {
final Header header = new Header();
header.setColumnNames(
displayableFields
.stream()
.map(DisplayableField::getName)
.collect(Collectors.toList())
);
return header;
}
private List<Row> buildRows(final ReportRequest reportRequest, final List<?> customerResultList) {
final ArrayList<Row> rows = new ArrayList<>();
customerResultList.forEach(result -> {
final Row row = new Row();
row.setValues(new ArrayList<>());
final String customerIdentifier;
if (result instanceof Object[]) {
final Object[] resultValues = (Object[]) result;
customerIdentifier = resultValues[0].toString();
for (final Object resultValue : resultValues) {
final Value value = new Value();
if (resultValue != null) {
value.setValues(new String[]{resultValue.toString()});
} else {
value.setValues(new String[]{});
}
row.getValues().add(value);
}
} else {
customerIdentifier = result.toString();
final Value value = new Value();
value.setValues(new String[]{result.toString()});
row.getValues().add(value);
}
final Query accountQuery = this.entityManager.createNativeQuery(this.buildDepositAccountQuery(reportRequest, customerIdentifier));
final List<?> accountResultList = accountQuery.getResultList();
final ArrayList<String> products = new ArrayList<>();
final ArrayList<String> depositAccountNumber = new ArrayList<>();
final ArrayList<String> depositType = new ArrayList<>();
final ArrayList<String> status = new ArrayList<>();
final ArrayList<String> createdBy = new ArrayList<>();
final ArrayList<String> dateCreated = new ArrayList<>();
accountResultList.forEach(accountResult -> {
final String productIdentifier;
if (accountResult instanceof Object[]) {
final Object[] accountResultValues = (Object[]) accountResult;
productIdentifier = accountResultValues[0].toString();
final Query depositProductQuery = this.entityManager.createNativeQuery(this.buildDepositProductQuery(reportRequest, productIdentifier));
final List<?> depositProductResultList = depositProductQuery.getResultList();
depositProductResultList.forEach(product -> {
final Object[] productResultValues = (Object[]) product;
for (int i = 0; i < productResultValues.length; i++) {
if (i == 0 && productResultValues[0] != null) {
products.add(productResultValues[0].toString());
}
if (i == 1 && productResultValues[1] != null) {
depositType.add(productResultValues[1].toString());
}
}
});
for (int i = 1; i < accountResultValues.length ; i++) {
if (i == 1 && accountResultValues[1] != null){
depositAccountNumber.add(accountResultValues[1].toString());
}
if (i == 2 && accountResultValues[2] != null){
status.add(accountResultValues[2].toString());
}
if (i == 3 && accountResultValues[3] != null){
createdBy.add(accountResultValues[3].toString());
}
if (i == 4 && accountResultValues[4] != null){
dateCreated.add(accountResultValues[4].toString());
}
}
}
});
final Value productValue = new Value();
productValue.setValues(products.toArray(new String[products.size()]));
row.getValues().add(productValue);
final Value depositTypeValue = new Value();
depositTypeValue.setValues(depositType.toArray(new String[depositAccountNumber.size()]));
row.getValues().add(depositTypeValue);
final Value depositAccountNumberValue = new Value();
depositAccountNumberValue.setValues(depositAccountNumber.toArray(new String[depositType.size()]));
row.getValues().add(depositAccountNumberValue);
final Value statusValue = new Value();
statusValue.setValues(status.toArray(new String[status.size()]));
row.getValues().add(statusValue);
final Value createdByValue = new Value();
createdByValue.setValues(createdBy.toArray(new String[createdBy.size()]));
row.getValues().add(createdByValue);
final Value dateCreatedValue = new Value();
dateCreatedValue.setValues(dateCreated.toArray(new String[dateCreated.size()]));
row.getValues().add(dateCreatedValue);
rows.add(row);
});
return rows;
}
private String buildCustomerQuery(final ReportRequest reportRequest, int pageIndex, int size) {
final StringBuilder query = new StringBuilder("SELECT ");
final List<DisplayableField> displayableFields;
displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.customerColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
query.append(columns.stream().collect(Collectors.joining(", ")))
.append(" FROM ")
.append("maat_customers cst ");
final List<QueryParameter> queryParameters = reportRequest.getQueryParameters();
if (!queryParameters.isEmpty()) {
final ArrayList<String> criteria = new ArrayList<>();
queryParameters.forEach(queryParameter -> {
if(queryParameter.getValue() != null && !queryParameter.getValue().isEmpty()) {
criteria.add(
CriteriaBuilder.buildCriteria(this.customerColumnMapping.get(queryParameter.getName()), queryParameter)
);
}
});
if (!criteria.isEmpty()) {
query.append(" WHERE ");
query.append(criteria.stream().collect(Collectors.joining(" AND ")));
}
}
query.append(" ORDER BY cst.identifier");
query.append(" LIMIT ");
query.append(size);
if (pageIndex > 0) {
query.append(" OFFSET ");
query.append(size * pageIndex);
}
return query.toString();
}
private String buildDepositAccountQuery(final ReportRequest reportRequest, final String customerIdentifier) {
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.depositAccountColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
return "SELECT " + columns.stream().collect(Collectors.joining(", ")) + " " +
"FROM shed_product_instances pi " +
"LEFT JOIN maat_customers cst on pi.customer_identifier = cst.identifier " +
"WHERE cst.identifier ='" + customerIdentifier + "' " +
"ORDER BY pi.account_identifier";
}
private String buildDepositProductQuery(final ReportRequest reportRequest, final String productIdentifier){
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.depositProductColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
return "SELECT DISTINCT " + columns.stream().collect(Collectors.joining(", ")) + " " +
"FROM shed_product_definitions pd " +
"LEFT JOIN shed_product_instances pi on pd.id = pi.product_definition_id " +
"WHERE pi.product_definition_id ='" + productIdentifier + "' ";
}
/**
 * Declares the columns a caller may request from this report; fields marked
 * mandatory are always part of the output.
 */
private List<DisplayableField> buildDisplayableFields() {
    final DisplayableField[] fields = {
            DisplayableFieldBuilder.create(CUSTOMER, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(FIRST_NAME, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(MIDDLE_NAME, Type.TEXT).build(),
            DisplayableFieldBuilder.create(LAST_NAME, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(OFFICE, Type.TEXT).build(),
            DisplayableFieldBuilder.create(PRODUCT, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(ACCOUNT_TYPE, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(ACCOUNT_NUMBER, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(STATE, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(EMPLOYEE, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(DATE_RANGE, Type.DATE).mandatory().build()
    };
    return Arrays.asList(fields);
}
/**
 * Query parameters this report supports: a creation-date range and a set of
 * account states.
 */
private List<QueryParameter> buildQueryParameters() {
    final QueryParameter createdOn =
            QueryParameterBuilder.create(DATE_RANGE, Type.DATE).operator(QueryParameter.Operator.BETWEEN).build();
    final QueryParameter state =
            QueryParameterBuilder.create(STATE, Type.TEXT).operator(QueryParameter.Operator.IN).build();
    return Arrays.asList(createdOn, state);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.specification;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Header;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.api.v1.domain.Row;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import org.apache.fineract.cn.reporting.api.v1.domain.Value;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.CriteriaBuilder;
import org.apache.fineract.cn.reporting.service.spi.DisplayableFieldBuilder;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.time.Clock;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.lang.DateConverter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@Report(category = "Loan", identifier = "Listing")
public class LoanListReportSpecification implements ReportSpecification {
private static final String CUSTOMER = "Customer";
private static final String FIRST_NAME = "First Name";
private static final String MIDDLE_NAME = "Middle Name";
private static final String LAST_NAME = "Last Name";
private static final String LOAN_TERM = "Loan Term";
private static final String TIME_UNIT = "Time Unit";
private static final String OFFICE = "Office";
private static final String PRINCIPAL = "Principal";
private static final String CASE = "Case Id";
private static final String LOAN = "Loan";
private static final String PRODUCT = "Type";
private static final String STATE = "State";
private static final String DATE_RANGE = "Created On";
private static final String EMPLOYEE = "Created By";
private final Logger logger;
private final EntityManager entityManager;
private final HashMap<String, String> customerColumnMapping = new HashMap<>();
private final HashMap<String, String> loanColumnMapping = new HashMap<>();
private final HashMap<String, String> caseColumnMapping = new HashMap<>();
private final HashMap<String, String> allColumnMapping = new HashMap<>();
// Constructor injection: Spring supplies the qualified service logger and the
// JPA entity manager used to run this report's native queries.
@Autowired
public LoanListReportSpecification(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger,
final EntityManager entityManager) {
super();
this.logger = logger;
this.entityManager = entityManager;
// Build the report-field -> SQL-column lookup tables once, up front.
this.initializeMapping();
}
/**
 * Describes this report: identifier, display name, supported query parameters
 * and the fields that may be displayed.
 */
@Override
public ReportDefinition getReportDefinition() {
    final ReportDefinition definition = new ReportDefinition();
    definition.setIdentifier("Listing");
    definition.setName("Loan Account Listing");
    definition.setDescription("List of all loan accounts.");
    definition.setQueryParameters(this.buildQueryParameters());
    definition.setDisplayableFields(this.buildDisplayableFields());
    return definition;
}
/**
 * Runs the report for one page: queries the matching customers, resolves the
 * loan and case details for each, and assembles the result rows.
 *
 * @param reportRequest requested displayable fields and query parameters
 * @param pageIndex zero-based page to return
 * @param size page size
 * @return the populated report page
 */
@Override
public ReportPage generateReport(ReportRequest reportRequest, int pageIndex, int size) {
    final ReportDefinition reportDefinition = this.getReportDefinition();
    // Fix: SLF4J placeholders are "{}"; "{0}" is not substituted and is logged literally.
    this.logger.info("Generating report {}.", reportDefinition.getIdentifier());
    final ReportPage reportPage = new ReportPage();
    reportPage.setName(reportDefinition.getName());
    reportPage.setDescription(reportDefinition.getDescription());
    reportPage.setHeader(this.createHeader(reportRequest.getDisplayableFields()));
    final Query customerQuery =
            this.entityManager.createNativeQuery(this.buildCustomerQuery(reportRequest, pageIndex, size));
    final List<?> customerResultList = customerQuery.getResultList();
    reportPage.setRows(this.buildRows(reportRequest, customerResultList));
    // Probe the next page to decide whether more data is available.
    reportPage.setHasMore(
            !this.entityManager.createNativeQuery(this.buildCustomerQuery(reportRequest, pageIndex + 1, size))
                    .getResultList().isEmpty()
    );
    reportPage.setGeneratedBy(UserContextHolder.checkedGetUser());
    reportPage.setGeneratedOn(DateConverter.toIsoString(LocalDateTime.now(Clock.systemUTC())));
    return reportPage;
}
/**
 * Ensures every requested query parameter and displayable field is known to
 * this report specification.
 *
 * @throws IllegalArgumentException if any unknown field is requested
 */
@Override
public void validate(ReportRequest reportRequest) throws IllegalArgumentException {
    final ArrayList<String> unknownFields = new ArrayList<>();
    reportRequest.getQueryParameters().forEach(queryParameter -> {
        // containsKey avoids materializing the key set for a simple lookup
        if (!this.allColumnMapping.containsKey(queryParameter.getName())) {
            unknownFields.add(queryParameter.getName());
        }
    });
    reportRequest.getDisplayableFields().forEach(displayableField -> {
        if (!this.allColumnMapping.containsKey(displayableField.getName())) {
            unknownFields.add(displayableField.getName());
        }
    });
    if (!unknownFields.isEmpty()) {
        throw new IllegalArgumentException(
                "Unspecified fields requested: " + String.join(", ", unknownFields)
        );
    }
}
// Maps report field names to the SQL columns backing them, split per source
// table so each sub-query selects only its own columns; allColumnMapping is
// the union used to validate incoming requests.
private void initializeMapping() {
this.customerColumnMapping.put(CUSTOMER, "cst.identifier");
this.customerColumnMapping.put(FIRST_NAME, "cst.given_name");
this.customerColumnMapping.put(MIDDLE_NAME, "cst.middle_name");
this.customerColumnMapping.put(LAST_NAME, "cst.surname");
this.customerColumnMapping.put(OFFICE, "cst.assigned_office");
this.loanColumnMapping.put(LOAN_TERM, "il_cases.term_range_maximum");
this.loanColumnMapping.put(TIME_UNIT, "il_cases.term_range_temporal_unit");
this.loanColumnMapping.put(PRINCIPAL, "il_cases.balance_range_maximum");
this.loanColumnMapping.put(CASE, "il_cases.case_id");
this.caseColumnMapping.put(LOAN, "cases.identifier");
this.caseColumnMapping.put(PRODUCT, "cases.product_identifier");
this.caseColumnMapping.put(STATE, "cases.current_state");
this.caseColumnMapping.put(DATE_RANGE, "cases.created_on");
this.caseColumnMapping.put(EMPLOYEE, "cases.created_by");
this.allColumnMapping.putAll(customerColumnMapping);
this.allColumnMapping.putAll(loanColumnMapping);
this.allColumnMapping.putAll(caseColumnMapping);
}
/** Builds the report header from the names of the requested displayable fields. */
private Header createHeader(List<DisplayableField> displayableFields) {
    final List<String> columnNames = new ArrayList<>();
    for (final DisplayableField field : displayableFields) {
        columnNames.add(field.getName());
    }
    final Header header = new Header();
    header.setColumnNames(columnNames);
    return header;
}
// Assembles one Row per customer result. Each row is the concatenation of the
// customer columns, then (per loan account) the il_cases columns, then the
// matching bastet_cases columns. Column order must match createHeader, which
// follows the order of reportRequest's displayable fields.
private List<Row> buildRows(ReportRequest reportRequest, List<?> customerResultList) {
final ArrayList<Row> rows = new ArrayList<>();
customerResultList.forEach(result -> {
final Row row = new Row();
row.setValues(new ArrayList<>());
final String customerIdentifier;
// Multi-column selects arrive as Object[]; a single-column select arrives
// as the bare value. The customer identifier is always selected first.
if (result instanceof Object[]) {
final Object[] resultValues;
resultValues = (Object[]) result;
customerIdentifier = resultValues[0].toString();
for (final Object resultValue : resultValues) {
final Value value = new Value();
if (resultValue != null) {
value.setValues(new String[]{resultValue.toString()});
} else {
// NULL column -> empty value array, keeping column alignment.
value.setValues(new String[]{});
}
row.getValues().add(value);
}
} else {
customerIdentifier = result.toString();
final Value value = new Value();
value.setValues(new String[]{result.toString()});
row.getValues().add(value);
}
// N+1 pattern: one loan-account query per customer row.
final Query accountQuery = this.entityManager.createNativeQuery(this.buildLoanAccountQuery(reportRequest, customerIdentifier));
final List<?> accountResultList = accountQuery.getResultList();
accountResultList.forEach(accountResult -> {
final String caseIdentifier;
if (accountResult instanceof Object[]) {
final Object[] accountResultValues;
accountResultValues = (Object[]) accountResult;
// The case id is always the first selected loan column.
caseIdentifier = accountResultValues[0].toString();
for (final Object loan: accountResultValues) {
final Value value = new Value();
if (loan != null) {
value.setValues(new String[]{loan.toString()});
} else {
value.setValues(new String[]{});
}
row.getValues().add(value);
}
}else {
caseIdentifier = accountResult.toString();
final Value value = new Value();
value.setValues(new String[]{accountResult.toString()});
row.getValues().add(value);
}
// And one case-detail query per loan account.
final Query caseQuery = this.entityManager.createNativeQuery(this.buildCaseQuery(reportRequest, caseIdentifier));
final List<?> caseResultList = caseQuery.getResultList();
caseResultList.forEach(loanCase -> {
// Case query always selects several columns, so the cast is safe here;
// NOTE(review): a single-column case select would ClassCastException — confirm.
final Object[] loanCaseResultValues = (Object[]) loanCase;
for (final Object loan : loanCaseResultValues) {
final Value value = new Value();
if (loan != null) {
value.setValues(new String[]{loan.toString()});
} else {
value.setValues(new String[]{});
}
row.getValues().add(value);
}
});
});
rows.add(row);
});
return rows;
}
/**
 * Declares the columns a caller may request for the loan listing; fields
 * marked mandatory are always part of the output.
 */
private List<DisplayableField> buildDisplayableFields() {
    final DisplayableField[] fields = {
            DisplayableFieldBuilder.create(CUSTOMER, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(FIRST_NAME, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(MIDDLE_NAME, Type.TEXT).build(),
            DisplayableFieldBuilder.create(LAST_NAME, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(OFFICE, Type.TEXT).build(),
            DisplayableFieldBuilder.create(CASE, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(PRINCIPAL, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(LOAN_TERM, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(TIME_UNIT, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(LOAN, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(STATE, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(EMPLOYEE, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(PRODUCT, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(DATE_RANGE, Type.TEXT).mandatory().build()
    };
    return Arrays.asList(fields);
}
// The loan listing supports no query parameters; returns an immutable empty list.
private List<QueryParameter> buildQueryParameters() {
return Arrays.asList();
}
// Assembles the paged SELECT over maat_customers for the requested customer
// columns; non-empty query-parameter criteria (rendered by CriteriaBuilder)
// are ANDed into the WHERE clause.
private String buildCustomerQuery(final ReportRequest reportRequest, int pageIndex, int size){
final StringBuilder query = new StringBuilder("SELECT ");
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column;
column = this.customerColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
query.append(columns.stream().collect(Collectors.joining(", ")))
.append(" FROM ")
.append("maat_customers cst ");
final List<QueryParameter> queryParameters = reportRequest.getQueryParameters();
if (!queryParameters.isEmpty()) {
final ArrayList<String> criteria = new ArrayList<>();
queryParameters.forEach(queryParameter -> {
// Skip parameters without a value; they contribute no criterion.
if((queryParameter.getValue() != null) && !queryParameter.getValue().isEmpty()) {
criteria.add(
CriteriaBuilder.buildCriteria(this.customerColumnMapping.get(queryParameter.getName()), queryParameter)
);
}
});
if (!criteria.isEmpty()) {
query.append(" WHERE ");
query.append(criteria.stream().collect(Collectors.joining(" AND ")));
}
}
query.append(" ORDER BY cst.identifier");
// Offset pagination: LIMIT size [OFFSET size * pageIndex].
query.append(" LIMIT ");
query.append(size);
if (pageIndex > 0) {
query.append(" OFFSET ");
query.append(size * pageIndex);
}
return query.toString();
}
/**
 * Builds the native SQL listing the requested loan (il_cases) columns for one customer.
 *
 * @param reportRequest report request whose displayable fields select the columns
 * @param customerIdentifier customer whose loan cases are listed
 * @return the SQL query string
 */
private String buildLoanAccountQuery(final ReportRequest reportRequest, final String customerIdentifier){
    final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
    final ArrayList<String> columns = new ArrayList<>();
    displayableFields.forEach(displayableField -> {
        final String column = this.loanColumnMapping.get(displayableField.getName());
        if (column != null) {
            columns.add(column);
        }
    });
    // Escape single quotes so a crafted identifier cannot break out of the literal.
    final String safeCustomerIdentifier = customerIdentifier.replace("'", "''");
    return "SELECT " + String.join(", ", columns) + " " +
            "FROM bastet_il_cases il_cases " +
            "LEFT JOIN maat_customers cst on il_cases.customer_identifier = cst.identifier " +
            "WHERE cst.identifier ='" + safeCustomerIdentifier + "' " +
            "ORDER BY il_cases.case_id";
}
/**
 * Builds the native SQL listing the requested case columns for one loan case.
 *
 * @param reportRequest report request whose displayable fields select the columns
 * @param caseIdentifier case id joining bastet_cases to bastet_il_cases
 * @return the SQL query string
 */
private String buildCaseQuery(final ReportRequest reportRequest, final String caseIdentifier){
    final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
    final ArrayList<String> columns = new ArrayList<>();
    displayableFields.forEach(displayableField -> {
        final String column = this.caseColumnMapping.get(displayableField.getName());
        if (column != null) {
            columns.add(column);
        }
    });
    // Escape single quotes so a crafted identifier cannot break out of the literal.
    final String safeCaseIdentifier = caseIdentifier.replace("'", "''");
    return "SELECT " + String.join(", ", columns) + " " +
            "FROM bastet_cases cases " +
            "LEFT JOIN bastet_il_cases il_cases on cases.id = il_cases.case_id " +
            "WHERE il_cases.case_id ='" + safeCaseIdentifier + "' ";
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.specification;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Header;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.api.v1.domain.Row;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import org.apache.fineract.cn.reporting.api.v1.domain.Value;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.CriteriaBuilder;
import org.apache.fineract.cn.reporting.service.spi.DisplayableFieldBuilder;
import org.apache.fineract.cn.reporting.service.spi.QueryParameterBuilder;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.text.DecimalFormat;
import java.time.Clock;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.lang.DateConverter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@Report(category = "Customer", identifier = "Listing")
public class CustomerListReportSpecification implements ReportSpecification {
private static final String DATE_RANGE = "Date range";
private static final String STATE = "State";
private static final String CUSTOMER = "Customer";
private static final String FIRST_NAME = "First name";
private static final String MIDDLE_NAME = "Middle name";
private static final String LAST_NAME = "Last name";
private static final String ACCOUNT_NUMBER = "Account number";
private static final String ADDRESS = "Address";
private final Logger logger;
private final EntityManager entityManager;
private final HashMap<String, String> customerColumnMapping = new HashMap<>();
private final HashMap<String, String> addressColumnMapping = new HashMap<>();
private final HashMap<String, String> accountColumnMapping = new HashMap<>();
private final HashMap<String, String> allColumnMapping = new HashMap<>();
// Constructor injection: Spring supplies the qualified service logger and the
// JPA entity manager used to run this report's native queries.
@Autowired
public CustomerListReportSpecification(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger,
final EntityManager entityManager) {
super();
this.logger = logger;
this.entityManager = entityManager;
// Build the report-field -> SQL-column lookup tables once, up front.
this.initializeMapping();
}
/**
 * Describes this report: identifier, display name, supported query parameters
 * and the fields that may be displayed.
 */
@Override
public ReportDefinition getReportDefinition() {
    final ReportDefinition definition = new ReportDefinition();
    definition.setIdentifier("Listing");
    definition.setName("Customer Listing");
    definition.setDescription("List of all customers.");
    definition.setQueryParameters(this.buildQueryParameters());
    definition.setDisplayableFields(this.buildDisplayableFields());
    return definition;
}
/**
 * Generates one page of the customer listing: selects the requested customer
 * columns, then enriches each row with account balances and the address.
 *
 * @param reportRequest requested displayable fields and query parameters
 * @param pageIndex zero-based page to return
 * @param size page size
 * @return the populated report page
 */
@Override
public ReportPage generateReport(final ReportRequest reportRequest, final int pageIndex, final int size) {
    final ReportDefinition reportDefinition = this.getReportDefinition();
    // Fix: SLF4J placeholders are "{}"; "{0}" is not substituted and is logged literally.
    this.logger.info("Generating report {}.", reportDefinition.getIdentifier());
    final ReportPage reportPage = new ReportPage();
    reportPage.setName(reportDefinition.getName());
    reportPage.setDescription(reportDefinition.getDescription());
    reportPage.setHeader(this.createHeader(reportRequest.getDisplayableFields()));
    final Query customerQuery = this.entityManager.createNativeQuery(this.buildCustomerQuery(reportRequest, pageIndex, size));
    final List<?> customerResultList = customerQuery.getResultList();
    reportPage.setRows(this.buildRows(reportRequest, customerResultList));
    // Probe the next page to decide whether more data is available.
    reportPage.setHasMore(
            !this.entityManager.createNativeQuery(this.buildCustomerQuery(reportRequest, pageIndex + 1, size))
                    .getResultList().isEmpty()
    );
    reportPage.setGeneratedBy(UserContextHolder.checkedGetUser());
    reportPage.setGeneratedOn(DateConverter.toIsoString(LocalDateTime.now(Clock.systemUTC())));
    return reportPage;
}
/**
 * Ensures every requested query parameter and displayable field is known to
 * this report specification.
 *
 * @throws IllegalArgumentException if any unknown field is requested
 */
@Override
public void validate(final ReportRequest reportRequest) throws IllegalArgumentException {
    final ArrayList<String> unknownFields = new ArrayList<>();
    reportRequest.getQueryParameters().forEach(queryParameter -> {
        // containsKey avoids materializing the key set for a simple lookup
        if (!this.allColumnMapping.containsKey(queryParameter.getName())) {
            unknownFields.add(queryParameter.getName());
        }
    });
    reportRequest.getDisplayableFields().forEach(displayableField -> {
        if (!this.allColumnMapping.containsKey(displayableField.getName())) {
            unknownFields.add(displayableField.getName());
        }
    });
    if (!unknownFields.isEmpty()) {
        throw new IllegalArgumentException(
                "Unspecified fields requested: " + String.join(", ", unknownFields)
        );
    }
}
// Maps report field names to the SQL columns backing them, split per source
// table; allColumnMapping is the union used to validate incoming requests.
private void initializeMapping() {
this.customerColumnMapping.put(DATE_RANGE, "cst.created_on");
this.customerColumnMapping.put(STATE, "cst.current_state");
this.customerColumnMapping.put(CUSTOMER, "cst.identifier");
this.customerColumnMapping.put(FIRST_NAME, "cst.given_name");
this.customerColumnMapping.put(MIDDLE_NAME, "cst.middle_name");
this.customerColumnMapping.put(LAST_NAME, "cst.surname");
// Account number deliberately maps to two columns (identifier + balance);
// buildRows renders them together as "identifier (balance)".
this.accountColumnMapping.put(ACCOUNT_NUMBER, "acc.identifier, acc.balance");
this.addressColumnMapping.put(ADDRESS, "CONCAT(adr.street, ', ', adr.postal_code, ', ', adr.city)");
this.allColumnMapping.putAll(customerColumnMapping);
this.allColumnMapping.putAll(accountColumnMapping);
this.allColumnMapping.putAll(addressColumnMapping);
}
/** Builds the report header from the names of the requested displayable fields. */
private Header createHeader(final List<DisplayableField> displayableFields) {
    final List<String> columnNames = new ArrayList<>();
    for (final DisplayableField field : displayableFields) {
        columnNames.add(field.getName());
    }
    final Header header = new Header();
    header.setColumnNames(columnNames);
    return header;
}
/**
 * Builds one report row per customer result: the customer columns first, then
 * one aggregated "identifier (balance)" value per account, then the address.
 * Column order must match createHeader.
 *
 * @param reportRequest requested displayable fields (drives the sub-queries)
 * @param customerResultList raw native-query results, one entry per customer
 * @return the assembled rows
 */
private List<Row> buildRows(final ReportRequest reportRequest, final List<?> customerResultList) {
    final ArrayList<Row> rows = new ArrayList<>();
    customerResultList.forEach(result -> {
        final Row row = new Row();
        row.setValues(new ArrayList<>());
        final String customerIdentifier;
        // Multi-column selects arrive as Object[]; a single-column select as the bare value.
        if (result instanceof Object[]) {
            final Object[] resultValues = (Object[]) result;
            // The customer identifier is always the first selected column.
            customerIdentifier = resultValues[0].toString();
            for (final Object resultValue : resultValues) {
                final Value value = new Value();
                if (resultValue != null) {
                    value.setValues(new String[]{resultValue.toString()});
                } else {
                    value.setValues(new String[]{});
                }
                row.getValues().add(value);
            }
        } else {
            customerIdentifier = result.toString();
            final Value value = new Value();
            value.setValues(new String[]{result.toString()});
            row.getValues().add(value);
        }
        final DecimalFormat decimalFormat = new DecimalFormat("0.00");
        final Query accountQuery = this.entityManager.createNativeQuery(this.buildAccountQuery(reportRequest, customerIdentifier));
        final List<?> accountResultList = accountQuery.getResultList();
        final ArrayList<String> values = new ArrayList<>();
        accountResultList.forEach(accountResult -> {
            if (accountResult instanceof Object[]) {
                final Object[] accountResultValues = (Object[]) accountResult;
                final String accountValue = accountResultValues[0].toString() + " (" +
                        decimalFormat.format(Double.valueOf(accountResultValues[1].toString())) + ")";
                values.add(accountValue);
            }
        });
        final Value accountValue = new Value();
        accountValue.setValues(values.toArray(new String[values.size()]));
        row.getValues().add(accountValue);
        final String addressQueryString = this.buildAddressQuery(reportRequest, customerIdentifier);
        if (addressQueryString != null) {
            final Query addressQuery = this.entityManager.createNativeQuery(addressQueryString);
            final List<?> resultList = addressQuery.getResultList();
            final Value addressValue = new Value();
            // Fix: a customer without an address row (or with NULL address parts,
            // making CONCAT return NULL) previously caused IndexOutOfBoundsException
            // or NullPointerException here; emit an empty value instead.
            if (!resultList.isEmpty() && resultList.get(0) != null) {
                addressValue.setValues(new String[]{resultList.get(0).toString()});
            } else {
                addressValue.setValues(new String[]{});
            }
            row.getValues().add(addressValue);
        }
        rows.add(row);
    });
    return rows;
}
/**
 * Query parameters supported by the customer listing: a creation-date range
 * and a set of customer states.
 */
private List<QueryParameter> buildQueryParameters() {
    final QueryParameter createdOn =
            QueryParameterBuilder.create(DATE_RANGE, Type.DATE).operator(QueryParameter.Operator.BETWEEN).build();
    final QueryParameter state =
            QueryParameterBuilder.create(STATE, Type.TEXT).operator(QueryParameter.Operator.IN).build();
    return Arrays.asList(createdOn, state);
}
/**
 * Declares the columns a caller may request for the customer listing; fields
 * marked mandatory are always part of the output.
 */
private List<DisplayableField> buildDisplayableFields() {
    final DisplayableField[] fields = {
            DisplayableFieldBuilder.create(CUSTOMER, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(FIRST_NAME, Type.TEXT).build(),
            DisplayableFieldBuilder.create(MIDDLE_NAME, Type.TEXT).build(),
            DisplayableFieldBuilder.create(LAST_NAME, Type.TEXT).build(),
            DisplayableFieldBuilder.create(ACCOUNT_NUMBER, Type.TEXT).mandatory().build(),
            DisplayableFieldBuilder.create(ADDRESS, Type.TEXT).build()
    };
    return Arrays.asList(fields);
}
// Assembles the paged SELECT over maat_customers for the requested customer
// columns; non-empty query-parameter criteria (rendered by CriteriaBuilder)
// are ANDed into the WHERE clause.
private String buildCustomerQuery(final ReportRequest reportRequest, int pageIndex, int size) {
final StringBuilder query = new StringBuilder("SELECT ");
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.customerColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
query.append(columns.stream().collect(Collectors.joining(", ")))
.append(" FROM ")
.append("maat_customers cst ");
final List<QueryParameter> queryParameters = reportRequest.getQueryParameters();
if (!queryParameters.isEmpty()) {
final ArrayList<String> criteria = new ArrayList<>();
queryParameters.forEach(queryParameter -> {
// Skip parameters without a value; they contribute no criterion.
if(queryParameter.getValue() != null && !queryParameter.getValue().isEmpty()) {
criteria.add(
CriteriaBuilder.buildCriteria(this.customerColumnMapping.get(queryParameter.getName()), queryParameter)
);
}
});
if (!criteria.isEmpty()) {
query.append(" WHERE ");
query.append(criteria.stream().collect(Collectors.joining(" AND ")));
}
}
query.append(" ORDER BY cst.identifier");
// Offset pagination: LIMIT size [OFFSET size * pageIndex].
query.append(" LIMIT ");
query.append(size);
if (pageIndex > 0) {
query.append(" OFFSET ");
query.append(size * pageIndex);
}
return query.toString();
}
/**
 * Builds the native SQL listing the requested account columns for one customer.
 *
 * @param reportRequest report request whose displayable fields select the columns
 * @param customerIdentifier customer whose accounts are listed
 * @return the SQL query string
 */
private String buildAccountQuery(final ReportRequest reportRequest, final String customerIdentifier) {
    final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
    final ArrayList<String> columns = new ArrayList<>();
    displayableFields.forEach(displayableField -> {
        final String column = this.accountColumnMapping.get(displayableField.getName());
        if (column != null) {
            columns.add(column);
        }
    });
    // Escape single quotes so a crafted identifier cannot break out of the literal.
    final String safeCustomerIdentifier = customerIdentifier.replace("'", "''");
    return "SELECT " + String.join(", ", columns) + " " +
            "FROM thoth_accounts acc " +
            "LEFT JOIN maat_customers cst on acc.holders = cst.identifier " +
            "WHERE cst.identifier ='" + safeCustomerIdentifier + "' " +
            "ORDER BY acc.identifier";
}
/**
 * Builds the native SQL selecting the formatted address for one customer, or
 * returns {@code null} when the address field was not requested.
 *
 * @param reportRequest report request whose displayable fields select the columns
 * @param customerIdentifier customer whose address is fetched
 * @return the SQL query string, or null if no address column is requested
 */
private String buildAddressQuery(final ReportRequest reportRequest, final String customerIdentifier) {
    final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
    final ArrayList<String> columns = new ArrayList<>();
    displayableFields.forEach(displayableField -> {
        final String column = this.addressColumnMapping.get(displayableField.getName());
        if (column != null) {
            columns.add(column);
        }
    });
    if (!columns.isEmpty()) {
        // Escape single quotes so a crafted identifier cannot break out of the literal.
        final String safeCustomerIdentifier = customerIdentifier.replace("'", "''");
        return "SELECT " + String.join(", ", columns) + " " +
                "FROM maat_addresses adr " +
                "LEFT JOIN maat_customers cst on adr.id = cst.address_id " +
                "WHERE cst.identifier ='" + safeCustomerIdentifier + "' ";
    }
    return null;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.specification;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Header;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.api.v1.domain.Row;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import org.apache.fineract.cn.reporting.api.v1.domain.Value;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.CriteriaBuilder;
import org.apache.fineract.cn.reporting.service.spi.DisplayableFieldBuilder;
import org.apache.fineract.cn.reporting.service.spi.QueryParameterBuilder;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.time.Clock;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.lang.DateConverter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@Report(category = "Teller", identifier = "Transactions")
public class TellerTransactionReportSpecification implements ReportSpecification {
private static final String TELLER_ID = "Teller Id";
private static final String TELLER = "Teller";
private static final String TRANSACTION_TYPE = "Transaction Type";
private static final String TRANSACTION_DATE = "Transaction Date";
private static final String CUSTOMER = "Customer";
private static final String SOURCE = "Source Account";
private static final String TARGET = "Target Account";
private static final String CLERK = "Clerk";
private static final String AMOUNT = "Amount";
private static final String STATUS = "Status";
private final Logger logger;
private final EntityManager entityManager;
private final HashMap<String, String> tellerColumnMapping = new HashMap<>();
private final HashMap<String, String> transactionColumnMapping = new HashMap<>();
private final HashMap<String, String> allColumnMapping = new HashMap<>();
// Constructor injection: Spring supplies the qualified service logger and the
// JPA entity manager used to run this report's native queries.
@Autowired
public TellerTransactionReportSpecification(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger,
final EntityManager entityManager) {
super();
this.logger = logger;
this.entityManager = entityManager;
// Build the report-field -> SQL-column lookup tables once, up front.
this.initializeMapping();
}
/**
 * Describes this report: identifier, display name, supported query parameters
 * and the fields that may be displayed.
 */
@Override
public ReportDefinition getReportDefinition() {
    final ReportDefinition definition = new ReportDefinition();
    definition.setIdentifier("Transactions");
    definition.setName("Teller Transactions");
    definition.setDescription("List all teller-cashier transactions.");
    definition.setQueryParameters(this.buildQueryParameters());
    definition.setDisplayableFields(this.buildDisplayableFields());
    return definition;
}
/**
 * Generates one page of the teller-transaction report from the teller query
 * results.
 *
 * @param reportRequest requested displayable fields and query parameters
 * @param pageIndex zero-based page to return
 * @param size page size
 * @return the populated report page
 */
@Override
public ReportPage generateReport(ReportRequest reportRequest, int pageIndex, int size) {
    final ReportDefinition reportDefinition = this.getReportDefinition();
    // Fix: SLF4J placeholders are "{}"; "{0}" is not substituted and is logged literally.
    this.logger.info("Generating report {}.", reportDefinition.getIdentifier());
    final ReportPage reportPage = new ReportPage();
    reportPage.setName(reportDefinition.getName());
    reportPage.setDescription(reportDefinition.getDescription());
    reportPage.setHeader(this.createHeader(reportRequest.getDisplayableFields()));
    final Query tellerQuery = this.entityManager.createNativeQuery(this.buildTellerQuery(reportRequest, pageIndex, size));
    final List<?> tellerResultList = tellerQuery.getResultList();
    reportPage.setRows(this.buildRows(reportRequest, tellerResultList));
    // Probe the next page to decide whether more data is available.
    reportPage.setHasMore(
            !this.entityManager.createNativeQuery(this.buildTellerQuery(reportRequest, pageIndex + 1, size))
                    .getResultList().isEmpty()
    );
    reportPage.setGeneratedBy(UserContextHolder.checkedGetUser());
    reportPage.setGeneratedOn(DateConverter.toIsoString(LocalDateTime.now(Clock.systemUTC())));
    return reportPage;
}
@Override
public void validate(ReportRequest reportRequest) throws IllegalArgumentException {
final ArrayList<String> unknownFields = new ArrayList<>();
reportRequest.getQueryParameters().forEach(queryParameter -> {
if (!this.allColumnMapping.keySet().contains(queryParameter.getName())) {
unknownFields.add(queryParameter.getName());
}
});
reportRequest.getDisplayableFields().forEach(displayableField -> {
if (!this.allColumnMapping.keySet().contains(displayableField.getName())) {
unknownFields.add(displayableField.getName());
}
});
if (!unknownFields.isEmpty()) {
throw new IllegalArgumentException(
"Unspecified fields requested: " + unknownFields.stream().collect(Collectors.joining(", "))
);
}
}
private void initializeMapping() {
this.tellerColumnMapping.put(TELLER_ID, "teller.id");
this.tellerColumnMapping.put(TELLER, "teller.identifier");
this.transactionColumnMapping.put(TRANSACTION_TYPE, "trx.transaction_type");
this.transactionColumnMapping.put(TRANSACTION_DATE, "trx.transaction_date");
this.transactionColumnMapping.put(CUSTOMER, "trx.customer_identifier");
this.transactionColumnMapping.put(SOURCE, "trx.customer_account_identifier");
this.transactionColumnMapping.put(TARGET, "trx.target_account_identifier");
this.transactionColumnMapping.put(CLERK, "trx.clerk");
this.transactionColumnMapping.put(AMOUNT, "trx.amount");
this.transactionColumnMapping.put(STATUS, "trx.a_state");
this.allColumnMapping.putAll(tellerColumnMapping);
this.allColumnMapping.putAll(transactionColumnMapping);
}
private Header createHeader(final List<DisplayableField> displayableFields) {
final Header header = new Header();
header.setColumnNames(
displayableFields
.stream()
.map(DisplayableField::getName)
.collect(Collectors.toList())
);
return header;
}
private List<Row> buildRows(final ReportRequest reportRequest, final List<?> tellerResultList) {
final ArrayList<Row> rows = new ArrayList<>();
tellerResultList.forEach(result -> {
final Row row = new Row();
row.setValues(new ArrayList<>());
final String tellerIdentifier;
if (result instanceof Object[]) {
final Object[] resultValues = (Object[]) result;
tellerIdentifier = resultValues[0].toString();
for (final Object resultValue : resultValues) {
final Value value = new Value();
if (resultValue != null) {
value.setValues(new String[]{resultValue.toString()});
} else {
value.setValues(new String[]{});
}
row.getValues().add(value);
}
} else {
tellerIdentifier = result.toString();
final Value value = new Value();
value.setValues(new String[]{result.toString()});
row.getValues().add(value);
}
final String transactionQueryString = this.buildTellerTransactionQuery(reportRequest, tellerIdentifier);
final Query transactionQuery = this.entityManager.createNativeQuery(transactionQueryString);
final List<?> resultList = transactionQuery.getResultList();
final ArrayList<String> transactionType = new ArrayList<>();
final ArrayList<String> transactionDate = new ArrayList<>();
final ArrayList<String> customer = new ArrayList<>();
final ArrayList<String> source = new ArrayList<>();
final ArrayList<String> target = new ArrayList<>();
final ArrayList<String> clerk = new ArrayList<>();
final ArrayList<String> amount = new ArrayList<>();
final ArrayList<String> status = new ArrayList<>();
resultList.forEach(transaction -> {
final Object[] transactionValue = (Object[]) transaction;
for (int i = 0; i < transactionValue.length; i++) {
if (i == 0 && transactionValue[0] != null) {
transactionType.add(transactionValue[0].toString());
}
if (i == 1 && transactionValue[1] != null) {
transactionDate.add(transactionValue[1].toString());
}
if (i == 2 && transactionValue[2] != null) {
customer.add(transactionValue[2].toString());
}
if (i == 3 && transactionValue[3] != null) {
source.add(transactionValue[3].toString());
}
if (i == 4 && transactionValue[4] != null) {
target.add(transactionValue[4].toString());
}
if (i == 5 && transactionValue[5] != null) {
clerk.add(transactionValue[5].toString());
}
if (i == 6 && transactionValue[6] != null) {
amount.add(transactionValue[6].toString());
}
if (i == 7 && transactionValue[7] != null) {
status.add(transactionValue[7].toString());
}
}
}
);
final Value transactionTypeValue = new Value();
transactionTypeValue.setValues(transactionType.toArray(new String[transactionType.size()]));
row.getValues().add(transactionTypeValue);
final Value transactionDateValue = new Value();
transactionDateValue.setValues(transactionDate.toArray(new String[transactionDate.size()]));
row.getValues().add(transactionDateValue);
final Value customerValue = new Value();
customerValue.setValues(customer.toArray(new String[customer.size()]));
row.getValues().add(customerValue);
final Value sourceValue = new Value();
sourceValue.setValues(source.toArray(new String[source.size()]));
row.getValues().add(sourceValue);
final Value targetValue = new Value();
targetValue.setValues(target.toArray(new String[target.size()]));
row.getValues().add(targetValue);
final Value clerkValue = new Value();
clerkValue.setValues(clerk.toArray(new String[clerk.size()]));
row.getValues().add(clerkValue);
final Value amountValue = new Value();
amountValue.setValues(amount.toArray(new String[amount.size()]));
row.getValues().add(amountValue);
final Value statusValue = new Value();
statusValue.setValues(status.toArray(new String[status.size()]));
row.getValues().add(statusValue);
rows.add(row);
});
return rows;
}
private List<DisplayableField> buildDisplayableFields() {
return Arrays.asList(
DisplayableFieldBuilder.create(TELLER_ID, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(TELLER, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(TRANSACTION_TYPE, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(TRANSACTION_DATE, Type.DATE).mandatory().build(),
DisplayableFieldBuilder.create(CUSTOMER, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(SOURCE, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(TARGET, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(CLERK, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(AMOUNT, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(STATUS, Type.TEXT).mandatory().build()
);
}
private List<QueryParameter> buildQueryParameters() {
return Arrays.asList(
QueryParameterBuilder.create(TRANSACTION_DATE, Type.DATE).operator(QueryParameter.Operator.BETWEEN).build(),
QueryParameterBuilder.create(STATUS, Type.TEXT).operator(QueryParameter.Operator.IN).build()
);
}
private String buildTellerQuery(ReportRequest reportRequest, int pageIndex, int size) {
final StringBuilder query = new StringBuilder("SELECT ");
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.tellerColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
query.append(columns.stream().collect(Collectors.joining(", ")))
.append(" FROM ")
.append("tajet_teller teller ");
query.append(" ORDER BY teller.id");
query.append(" LIMIT ");
query.append(size);
if (pageIndex > 0) {
query.append(" OFFSET ");
query.append(size * pageIndex);
}
return query.toString();
}
private String buildTellerTransactionQuery(final ReportRequest reportRequest, final String tellerIdentifier) {
final StringBuilder query = new StringBuilder("SELECT ");
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.transactionColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
query.append(columns.stream().collect(Collectors.joining(", ")))
.append(" FROM ")
.append("tajet_teller_transactions trx " +
"LEFT JOIN tajet_teller teller on trx.teller_id = teller.id ");
query.append("WHERE teller.id ='" + tellerIdentifier + "'");
final List<QueryParameter> queryParameters = reportRequest.getQueryParameters();
if (!queryParameters.isEmpty()) {
final ArrayList<String> criteria = new ArrayList<>();
queryParameters.forEach(queryParameter -> {
if (queryParameter.getValue() != null && !queryParameter.getValue().isEmpty()) {
criteria.add(
CriteriaBuilder.buildCriteria(this.transactionColumnMapping.get(queryParameter.getName()), queryParameter)
);
}
});
if (!criteria.isEmpty()) {
query.append(" AND ");
query.append(criteria.stream().collect(Collectors.joining(" AND ")));
}
}
return query.toString();
}
}

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.specification;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Header;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.api.v1.domain.Row;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import org.apache.fineract.cn.reporting.api.v1.domain.Value;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.DisplayableFieldBuilder;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.math.BigDecimal;
import java.time.Clock;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.lang.DateConverter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@Report(category = "Accounting", identifier = "Balancesheet")
public class BalanceSheetReportSpecification implements ReportSpecification {
private static final String DATE_RANGE = "Date range";
private static final String TYPE = "Type";
private static final String IDENTIFIER = "Identifier";
private static final String NAME = "Name";
private static final String BALANCE = "Balance";
private final Logger logger;
private final EntityManager entityManager;
private final HashMap<String, String> accountingColumnMapping = new HashMap<>();
private final HashMap<String, String> allColumnMapping = new HashMap<>();
public BalanceSheetReportSpecification(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger,
final EntityManager entityManager){
super();
this.logger = logger;
this.entityManager = entityManager;
this.initializeMapping();
}
@Override
public ReportDefinition getReportDefinition() {
final ReportDefinition reportDefinition = new ReportDefinition();
reportDefinition.setIdentifier("Balancesheet");
reportDefinition.setName("Balance Sheet");
reportDefinition.setDescription("Balance Sheet Report");
reportDefinition.setQueryParameters(this.buildQueryParameters());
reportDefinition.setDisplayableFields(this.buildDisplayableFields());
return reportDefinition;
}
@Override
public ReportPage generateReport(ReportRequest reportRequest, int pageIndex, int size) {
final ReportDefinition reportDefinition = this.getReportDefinition();
this.logger.info("Generating report {0}.", reportDefinition.getIdentifier());
final ReportPage reportPage = new ReportPage();
reportPage.setName(reportDefinition.getName());
reportPage.setDescription(reportDefinition.getDescription());
reportPage.setHeader(this.createHeader(reportRequest.getDisplayableFields()));
final Query accountQuery = this.entityManager.createNativeQuery(this.buildAssetQuery(reportRequest, pageIndex, size));
final List<?> accountResultList = accountQuery.getResultList();
reportPage.setRows(this.buildRows(reportRequest, accountResultList));
reportPage.setHasMore(
!this.entityManager.createNativeQuery(this.buildAssetQuery(reportRequest, pageIndex + 1, size))
.getResultList().isEmpty()
);
reportPage.setGeneratedBy(UserContextHolder.checkedGetUser());
reportPage.setGeneratedOn(DateConverter.toIsoString(LocalDateTime.now(Clock.systemUTC())));
return reportPage;
}
@Override
public void validate(ReportRequest reportRequest) throws IllegalArgumentException {
final ArrayList<String> unknownFields = new ArrayList<>();
reportRequest.getQueryParameters().forEach(queryParameter -> {
if (!this.allColumnMapping.keySet().contains(queryParameter.getName())) {
unknownFields.add(queryParameter.getName());
}
});
reportRequest.getDisplayableFields().forEach(displayableField -> {
if (!this.allColumnMapping.keySet().contains(displayableField.getName())) {
unknownFields.add(displayableField.getName());
}
});
if (!unknownFields.isEmpty()) {
throw new IllegalArgumentException(
"Unspecified fields requested: " + unknownFields.stream().collect(Collectors.joining(", "))
);
}
}
private void initializeMapping() {
this.accountingColumnMapping.put(DATE_RANGE, "acc.created_on");
this.accountingColumnMapping.put(TYPE, "acc.a_type");
this.accountingColumnMapping.put(IDENTIFIER, "acc.identifier");
this.accountingColumnMapping.put(NAME, "acc.a_name");
this.accountingColumnMapping.put(BALANCE, "acc.balance");
this.allColumnMapping.putAll(accountingColumnMapping);
}
private Header createHeader(List<DisplayableField> displayableFields) {
final Header header = new Header();
header.setColumnNames(
displayableFields
.stream()
.map(DisplayableField::getName)
.collect(Collectors.toList())
);
return header;
}
private List<Row> buildRows(ReportRequest reportRequest, List<?> accountResultList) {
final ArrayList<Row> rows = new ArrayList<>();
final Row totalAssetRow = new Row();
totalAssetRow.setValues(new ArrayList<>());
final Value subAssetTotal = new Value();
final BigDecimal[] assetSubTotal = {new BigDecimal("0.000")};
accountResultList.forEach(result -> {
final Row row = new Row();
row.setValues(new ArrayList<>());
if (result instanceof Object[]) {
final Object[] resultValues;
resultValues = (Object[]) result;
for (int i = 0; i < resultValues.length; i++){
final Value assetValue = new Value();
if (resultValues[i] != null){
assetValue.setValues(new String[]{resultValues[i].toString()});
}else assetValue.setValues(new String[]{});
row.getValues().add(assetValue);
assetSubTotal[0] = assetSubTotal[0].add((BigDecimal)resultValues[3]);
}
} else {
final Value value = new Value();
value.setValues(new String[]{result.toString()});
row.getValues().add(value);
}
rows.add(row);
});
subAssetTotal.setValues(new String[]{new StringBuilder().append("TOTAL ASSETS ").append(assetSubTotal[0]).toString()});
totalAssetRow.getValues().add(subAssetTotal);
rows.add(totalAssetRow);
final String liabilityQueryString = this.buildLiabilityQuery(reportRequest);
final Query liabilityQuery = this.entityManager.createNativeQuery(liabilityQueryString);
final List<?> liabilityResultList = liabilityQuery.getResultList();
final Row totalLiabilityRow = new Row();
totalLiabilityRow.setValues(new ArrayList<>());
final Value subLiabilityTotal = new Value();
final BigDecimal[] liabilitySubTotal = {new BigDecimal("0.000")};
liabilityResultList.forEach(result -> {
final Row row = new Row();
row.setValues(new ArrayList<>());
if (result instanceof Object[]) {
final Object[] resultValues;
resultValues = (Object[]) result;
for (int i = 0; i < resultValues.length; i++){
final Value liabilityValue = new Value();
if (resultValues[i] != null) liabilityValue.setValues(new String[]{resultValues[i].toString()});
else liabilityValue.setValues(new String[]{});
row.getValues().add(liabilityValue);
liabilitySubTotal[0] = liabilitySubTotal[0].add((BigDecimal)resultValues[3]);
}
} else {
final Value value;
value = new Value();
value.setValues(new String[]{result.toString()});
row.getValues().add(value);
}
rows.add(row);
});
subLiabilityTotal.setValues(new String[]{new StringBuilder().append("TOTAL LIABILITIES ").append(liabilitySubTotal[0]).toString()});
totalLiabilityRow.getValues().add(subLiabilityTotal);
rows.add(totalLiabilityRow);
final String equityQueryString = this.buildEquityQuery(reportRequest);
final Query equityQuery = this.entityManager.createNativeQuery(equityQueryString);
final List<?> equityResultList = equityQuery.getResultList();
final Row totalEquityRow = new Row();
totalEquityRow.setValues(new ArrayList<>());
final Value subEquityTotal = new Value();
final Row totalLiabilityAndEquityRow = new Row();
totalLiabilityAndEquityRow.setValues(new ArrayList<>());
final Value totalLiabilityAndEquityValue = new Value();
final BigDecimal[] equitySubTotal = {new BigDecimal("0.000")};
equityResultList.forEach(result -> {
final Row row = new Row();
row.setValues(new ArrayList<>());
if (result instanceof Object[]) {
final Object[] resultValues;
resultValues = (Object[]) result;
for (int i = 0; i < resultValues.length; i++){
final Value equityValue = new Value();
if (resultValues[i] != null) equityValue.setValues(new String[]{resultValues[i].toString()});
else equityValue.setValues(new String[]{});
row.getValues().add(equityValue);
equitySubTotal[0] = equitySubTotal[0].add((BigDecimal)resultValues[3]);
}
} else {
final Value value;
value = new Value();
value.setValues(new String[]{result.toString()});
row.getValues().add(value);
}
rows.add(row);
});
subEquityTotal.setValues(new String[]{new StringBuilder().append("TOTAL EQUITY ").append(equitySubTotal[0]).toString()});
totalEquityRow.getValues().add(subEquityTotal);
rows.add(totalEquityRow);
final BigDecimal liabilityAndEquity = liabilitySubTotal[0].add(equitySubTotal[0]);
totalLiabilityAndEquityValue.setValues(new String[]{new StringBuilder().append("TOTAL LIABILITIES and EQUITY ").append(liabilityAndEquity).toString()});
totalLiabilityAndEquityRow.getValues().add(totalLiabilityAndEquityValue);
rows.add(totalLiabilityAndEquityRow);
return rows;
}
private String buildAssetQuery(final ReportRequest reportRequest, int pageIndex, int size) {
final StringBuilder query = new StringBuilder("SELECT ");
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.accountingColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
query.append(columns.stream().collect(Collectors.joining(", ")))
.append(" FROM ")
.append("thoth_accounts acc ")
.append("WHERE acc.a_type = 'ASSET' ");
query.append(" ORDER BY acc.identifier");
return query.toString();
}
private String buildLiabilityQuery(final ReportRequest reportRequest) {
final StringBuilder query = new StringBuilder("SELECT ");
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.accountingColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
query.append(columns.stream().collect(Collectors.joining(", ")))
.append(" FROM ")
.append("thoth_accounts acc ")
.append("WHERE acc.a_type = 'LIABILITY' ");
query.append(" ORDER BY acc.identifier");
return query.toString();
}
private String buildEquityQuery(final ReportRequest reportRequest) {
final StringBuilder query = new StringBuilder("SELECT ");
final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
final ArrayList<String> columns = new ArrayList<>();
displayableFields.forEach(displayableField -> {
final String column = this.accountingColumnMapping.get(displayableField.getName());
if (column != null) {
columns.add(column);
}
});
query.append(columns.stream().collect(Collectors.joining(", ")))
.append(" FROM ")
.append("thoth_accounts acc ")
.append("WHERE acc.a_type = 'EQUITY' ");
query.append(" ORDER BY acc.identifier");
return query.toString();
}
private List<DisplayableField> buildDisplayableFields() {
return Arrays.asList(
DisplayableFieldBuilder.create(TYPE, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(IDENTIFIER, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(NAME, Type.TEXT).mandatory().build(),
DisplayableFieldBuilder.create(BALANCE, Type.TEXT).mandatory().build()
);
}
private List<QueryParameter> buildQueryParameters() {
return Arrays.asList();
}
}