comment stringlengths 1 45k | method_body stringlengths 23 281k | target_code stringlengths 0 5.16k | method_body_after stringlengths 12 281k | context_before stringlengths 8 543k | context_after stringlengths 8 543k |
|---|---|---|---|---|---|
Please check current design , it is more seem less in respect to customer expectation , code will only create resource if it is not present , and only delete newly created resource and not the existing | void shutdown() {
cosmosAsyncContainer.delete().block();
logger.info("Deleted test container {}" , this.configuration.getCollectionId());
cosmosClient.close();
} | cosmosAsyncContainer.delete().block(); | void shutdown() {
if (this.databaseCreated) {
cosmosAsyncDatabase.delete().block();
logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
} else if (this.collectionCreated) {
cosmosAsyncContainer.delete().block();
logger.info("Deleted temporary collection {} created for this test", this.configuration.getCollectionId());
}
cosmosClient.close();
} | class AsyncBenchmark<T> {
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final ScheduledReporter reporter;
private Meter successMeter;
private Meter failureMeter;
final Logger logger;
final CosmosAsyncClient cosmosClient;
final CosmosAsyncContainer cosmosAsyncContainer;
final CosmosAsyncDatabase cosmosAsyncDatabase;
final String partitionKey;
final Configuration configuration;
final List<PojoizedJson> docsToRead;
final Semaphore concurrencyControlSemaphore;
Timer latency;
AsyncBenchmark(Configuration cfg) {
cosmosClient = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.connectionPolicy(cfg.getConnectionPolicy())
.consistencyLevel(cfg.getConsistencyLevel())
.buildAsyncClient();
configuration = cfg;
cosmosAsyncDatabase = cosmosClient.createDatabaseIfNotExists(this.configuration.getDatabaseId()).block().getDatabase();
cosmosAsyncContainer =
cosmosAsyncDatabase.createContainerIfNotExists(configuration.getCollectionId(), Configuration.PARTITION_KEY, configuration.getThroughput()).block().getContainer();
logger = LoggerFactory.getLogger(this.getClass());
partitionKey = cosmosAsyncContainer.read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
if (configuration.getOperationType() != Configuration.Operation.WriteLatency
&& configuration.getOperationType() != Configuration.Operation.WriteThroughput
&& configuration.getOperationType() != Configuration.Operation.ReadMyWrites) {
String dataFieldValue = RandomStringUtils.randomAlphabetic(cfg.getDocumentDataFieldSize());
for (int i = 0; i < cfg.getNumberOfPreCreatedDocuments(); i++) {
String uuid = UUID.randomUUID().toString();
PojoizedJson newDoc = generateDocument(uuid, dataFieldValue);
Flux<PojoizedJson> obs = cosmosAsyncContainer.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}
).flux();
createDocumentObservables.add(obs);
}
}
docsToRead = Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block();
init();
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
if (configuration.getGraphiteEndpoint() != null) {
final Graphite graphite = new Graphite(new InetSocketAddress(configuration.getGraphiteEndpoint(), configuration.getGraphiteEndpointPort()));
reporter = GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.filter(MetricFilter.ALL)
.build(graphite);
} else {
reporter = ConsoleReporter.forRegistry(metricsRegistry).convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS).build();
}
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
}
protected void init() {
}
protected void onSuccess() {
}
protected void onError(Throwable throwable) {
}
protected abstract void performWorkload(BaseSubscriber<T> baseSubscriber, long i) throws Exception;
private boolean shouldContinue(long startTimeMillis, long iterationCount) {
Duration maxDurationTime = configuration.getMaxRunningTimeDuration();
int maxNumberOfOperations = configuration.getNumberOfOperations();
if (maxDurationTime == null) {
return iterationCount < maxNumberOfOperations;
}
if (startTimeMillis + maxDurationTime.toMillis() < System.currentTimeMillis()) {
return false;
}
if (maxNumberOfOperations < 0) {
return true;
}
return iterationCount < maxNumberOfOperations;
}
void run() throws Exception {
successMeter = metricsRegistry.meter("
failureMeter = metricsRegistry.meter("
switch (configuration.getOperationType()) {
case ReadLatency:
case WriteLatency:
case QueryInClauseParallel:
case QueryCross:
case QuerySingle:
case QuerySingleMany:
case QueryParallel:
case QueryOrderby:
case QueryAggregate:
case QueryAggregateTopOrderby:
case QueryTopOrderby:
case Mixed:
latency = metricsRegistry.timer("Latency");
break;
default:
break;
}
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
for ( i = 0; shouldContinue(startTime, i); i++) {
BaseSubscriber<T> baseSubscriber = new BaseSubscriber<T>() {
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
logger.debug("hookOnNext: {}, count:{}", value, count.get());
}
@Override
protected void hookOnCancel() {
this.hookOnError(new CancellationException());
}
@Override
protected void hookOnComplete() {
successMeter.mark();
concurrencyControlSemaphore.release();
AsyncBenchmark.this.onSuccess();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
@Override
protected void hookOnError(Throwable throwable) {
failureMeter.mark();
logger.error("Encountered failure {} on thread {}" ,
throwable.getMessage(), Thread.currentThread().getName(), throwable);
concurrencyControlSemaphore.release();
AsyncBenchmark.this.onError(throwable);
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
};
performWorkload(baseSubscriber, i);
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
}
public PojoizedJson generateDocument(String idString, String dataFieldValue) {
PojoizedJson instance = new PojoizedJson();
Map<String, String> properties = instance.getInstance();
properties.put("id", idString);
properties.put(partitionKey, idString);
for (int i = 0; i < configuration.getDocumentDataFieldCount(); i++) {
properties.put("dataField" + i, dataFieldValue);
}
return instance;
}
} | class AsyncBenchmark<T> {
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final ScheduledReporter reporter;
private Meter successMeter;
private Meter failureMeter;
private boolean databaseCreated;
private boolean collectionCreated;
final Logger logger;
final CosmosAsyncClient cosmosClient;
CosmosAsyncContainer cosmosAsyncContainer;
CosmosAsyncDatabase cosmosAsyncDatabase;
final String partitionKey;
final Configuration configuration;
final List<PojoizedJson> docsToRead;
final Semaphore concurrencyControlSemaphore;
Timer latency;
AsyncBenchmark(Configuration cfg) {
cosmosClient = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.connectionPolicy(cfg.getConnectionPolicy())
.consistencyLevel(cfg.getConsistencyLevel())
.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId()).read().block().getDatabase();
} catch (CosmosClientException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase = cosmosClient.createDatabase(cfg.getDatabaseId()).block().getDatabase();
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
throw e;
}
}
try {
cosmosAsyncContainer = cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId()).read().block().getContainer();
} catch (CosmosClientException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncContainer =
cosmosAsyncDatabase.createContainer(this.configuration.getCollectionId(), Configuration.DEFAULT_PARTITION_KEY_PATH, this.configuration.getThroughput()).block().getContainer();
logger.info("Collection {} is created for this test", this.configuration.getCollectionId());
collectionCreated = true;
} else {
throw e;
}
}
partitionKey = cosmosAsyncContainer.read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
if (configuration.getOperationType() != Configuration.Operation.WriteLatency
&& configuration.getOperationType() != Configuration.Operation.WriteThroughput
&& configuration.getOperationType() != Configuration.Operation.ReadMyWrites) {
String dataFieldValue = RandomStringUtils.randomAlphabetic(cfg.getDocumentDataFieldSize());
for (int i = 0; i < cfg.getNumberOfPreCreatedDocuments(); i++) {
String uuid = UUID.randomUUID().toString();
PojoizedJson newDoc = generateDocument(uuid, dataFieldValue);
Flux<PojoizedJson> obs = cosmosAsyncContainer.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}
).flux();
createDocumentObservables.add(obs);
}
}
docsToRead = Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block();
init();
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
if (configuration.getGraphiteEndpoint() != null) {
final Graphite graphite = new Graphite(new InetSocketAddress(configuration.getGraphiteEndpoint(), configuration.getGraphiteEndpointPort()));
reporter = GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.filter(MetricFilter.ALL)
.build(graphite);
} else {
reporter = ConsoleReporter.forRegistry(metricsRegistry).convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS).build();
}
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
}
protected void init() {
}
protected void onSuccess() {
}
protected void onError(Throwable throwable) {
}
protected abstract void performWorkload(BaseSubscriber<T> baseSubscriber, long i) throws Exception;
private boolean shouldContinue(long startTimeMillis, long iterationCount) {
Duration maxDurationTime = configuration.getMaxRunningTimeDuration();
int maxNumberOfOperations = configuration.getNumberOfOperations();
if (maxDurationTime == null) {
return iterationCount < maxNumberOfOperations;
}
if (startTimeMillis + maxDurationTime.toMillis() < System.currentTimeMillis()) {
return false;
}
if (maxNumberOfOperations < 0) {
return true;
}
return iterationCount < maxNumberOfOperations;
}
void run() throws Exception {
successMeter = metricsRegistry.meter("
failureMeter = metricsRegistry.meter("
switch (configuration.getOperationType()) {
case ReadLatency:
case WriteLatency:
case QueryInClauseParallel:
case QueryCross:
case QuerySingle:
case QuerySingleMany:
case QueryParallel:
case QueryOrderby:
case QueryAggregate:
case QueryAggregateTopOrderby:
case QueryTopOrderby:
case Mixed:
latency = metricsRegistry.timer("Latency");
break;
default:
break;
}
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
for ( i = 0; shouldContinue(startTime, i); i++) {
BaseSubscriber<T> baseSubscriber = new BaseSubscriber<T>() {
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
logger.debug("hookOnNext: {}, count:{}", value, count.get());
}
@Override
protected void hookOnCancel() {
this.hookOnError(new CancellationException());
}
@Override
protected void hookOnComplete() {
successMeter.mark();
concurrencyControlSemaphore.release();
AsyncBenchmark.this.onSuccess();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
@Override
protected void hookOnError(Throwable throwable) {
failureMeter.mark();
logger.error("Encountered failure {} on thread {}" ,
throwable.getMessage(), Thread.currentThread().getName(), throwable);
concurrencyControlSemaphore.release();
AsyncBenchmark.this.onError(throwable);
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
};
performWorkload(baseSubscriber, i);
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
}
public PojoizedJson generateDocument(String idString, String dataFieldValue) {
PojoizedJson instance = new PojoizedJson();
Map<String, String> properties = instance.getInstance();
properties.put("id", idString);
properties.put(partitionKey, idString);
for (int i = 0; i < configuration.getDocumentDataFieldCount(); i++) {
properties.put("dataField" + i, dataFieldValue);
}
return instance;
}
} |
should these comments actually be bufferedNext? I'm assuming you just renamed the variable and forgot to change the comments | public Path next() {
if (this.bufferedNext == null) {
if (!this.hasNext()) {
throw LoggingUtility.logError(logger, new NoSuchElementException());
}
}
Path next = this.bufferedNext;
this.bufferedNext = null;
return next;
} | if (!this.hasNext()) { | public Path next() {
if (this.bufferedNext == null) {
if (!this.hasNext()) {
throw LoggingUtility.logError(logger, new NoSuchElementException());
}
}
Path next = this.bufferedNext;
this.bufferedNext = null;
return next;
} | class AzureDirectoryIterator implements Iterator<Path> {
private final ClientLogger logger = new ClientLogger(AzureDirectoryIterator.class);
private final AzureDirectoryStream parentStream;
private final DirectoryStream.Filter<? super Path> filter;
private final Iterator<BlobItem> blobIterator;
private final AzurePath path;
private Path bufferedNext = null;
private final Set<String> directoryPaths;
AzureDirectoryIterator(AzureDirectoryStream parentStream, AzurePath path,
DirectoryStream.Filter<? super Path> filter) throws IOException {
this.parentStream = parentStream;
this.filter = filter;
this.path = path;
directoryPaths = new HashSet<>();
BlobContainerClient containerClient;
ListBlobsOptions listOptions = new ListBlobsOptions()
.setDetails(new BlobListDetails().setRetrieveMetadata(true));
if (path.isRoot()) {
String containerName = path.toString().substring(0, path.toString().length() - 1);
containerClient = ((AzureFileSystem) path.getFileSystem()).getBlobServiceClient()
.getBlobContainerClient(containerName);
} else {
AzureResource azureResource = new AzureResource(path);
listOptions.setPrefix(azureResource.getBlobClient().getBlobName() + AzureFileSystem.PATH_SEPARATOR);
containerClient = azureResource.getContainerClient();
}
this.blobIterator = containerClient
.listBlobsByHierarchy(AzureFileSystem.PATH_SEPARATOR, listOptions, null).iterator();
}
@Override
public boolean hasNext() {
if (parentStream.closed) {
return false;
}
if (this.bufferedNext != null) {
return true;
}
/*
Search for a new element that passes the filter and buffer it when found. If no such element is found,
return false.
*/
while (this.blobIterator.hasNext()) {
BlobItem nextBlob = this.blobIterator.next();
Path nextPath = getNextListResult(nextBlob);
try {
if (this.filter.accept(nextPath) && passesDirectoryDuplicateFilter(nextPath, nextBlob)) {
this.bufferedNext = nextPath;
return true;
}
} catch (IOException e) {
throw LoggingUtility.logError(logger, new DirectoryIteratorException(e));
}
}
return false;
}
@Override
@Override
public void remove() {
throw new UnsupportedOperationException();
}
private Path getNextListResult(BlobItem blobItem) {
Path withoutRoot = this.path;
if (withoutRoot.isAbsolute()) {
withoutRoot = this.path.getRoot().relativize(this.path);
}
/*
Listing results return the full blob path, and we don't want to duplicate the path we listed off of, so
we relativize to remove it.
*/
String blobName = blobItem.getName();
Path relativeResult = withoutRoot.relativize(
this.path.getFileSystem().getPath(blobName));
return this.path.resolve(relativeResult);
}
/*
If there is a concrete directory with children, a given path will be returned twice: once as the marker blob
and once as the prefix for its children. We don't want to return the item twice, and we have no guarantees on
result ordering, so we have to maintain a cache of directory paths we've seen in order to de-dup.
*/
private boolean passesDirectoryDuplicateFilter(Path path, BlobItem blob) {
/*
If the blob is not a prefix and the blob does not contain the directory metadata marker, it is a normal blob
and therefore will not be duplicated.
*/
if (!(blob.isPrefix() != null && blob.isPrefix())
&& !(blob.getMetadata() != null && blob.getMetadata().containsKey(AzureResource.DIR_METADATA_MARKER))) {
return true;
}
if (this.directoryPaths.contains(path.toString())) {
return false;
}
this.directoryPaths.add(path.toString());
return true;
}
} | class AzureDirectoryIterator implements Iterator<Path> {
private final ClientLogger logger = new ClientLogger(AzureDirectoryIterator.class);
private final AzureDirectoryStream parentStream;
private final DirectoryStream.Filter<? super Path> filter;
private final Iterator<BlobItem> blobIterator;
private final AzurePath path;
private final Path withoutRoot;
private Path bufferedNext = null;
private final Set<String> directoryPaths;
AzureDirectoryIterator(AzureDirectoryStream parentStream, AzurePath path,
DirectoryStream.Filter<? super Path> filter) throws IOException {
this.parentStream = parentStream;
this.filter = filter;
this.path = path;
/*
Resolving two paths requires that either both have a root or neither does. Because the paths returned from
listing will never have a root, we prepare a copy of the list path without a root for quick resolving later.
*/
Path root = this.path.getRoot();
this.withoutRoot = root == null ? this.path : root.relativize(this.path);
directoryPaths = new HashSet<>();
BlobContainerClient containerClient;
ListBlobsOptions listOptions = new ListBlobsOptions()
.setDetails(new BlobListDetails().setRetrieveMetadata(true));
if (path.isRoot()) {
String containerName = path.toString().substring(0, path.toString().length() - 1);
AzureFileSystem afs = ((AzureFileSystem) path.getFileSystem());
containerClient = ((AzureFileStore) afs.getFileStore(containerName)).getContainerClient();
} else {
AzureResource azureResource = new AzureResource(path);
listOptions.setPrefix(azureResource.getBlobClient().getBlobName() + AzureFileSystem.PATH_SEPARATOR);
containerClient = azureResource.getContainerClient();
}
this.blobIterator = containerClient
.listBlobsByHierarchy(AzureFileSystem.PATH_SEPARATOR, listOptions, null).iterator();
}
@Override
public boolean hasNext() {
if (parentStream.closed) {
return false;
}
if (this.bufferedNext != null) {
return true;
}
/*
Search for a new element that passes the filter and buffer it when found. If no such element is found,
return false.
*/
while (this.blobIterator.hasNext()) {
BlobItem nextBlob = this.blobIterator.next();
Path nextPath = getNextListResult(nextBlob);
try {
if (this.filter.accept(nextPath) && isNotDuplicate(nextPath, nextBlob)) {
this.bufferedNext = nextPath;
return true;
}
} catch (IOException e) {
throw LoggingUtility.logError(logger, new DirectoryIteratorException(e));
}
}
return false;
}
@Override
@Override
public void remove() {
throw new UnsupportedOperationException();
}
private Path getNextListResult(BlobItem blobItem) {
/*
Listing results return the full blob path, and we don't want to duplicate the path we listed off of, so
we relativize to remove it.
*/
String blobName = blobItem.getName();
Path relativeResult = this.withoutRoot.relativize(
this.path.getFileSystem().getPath(blobName));
return this.path.resolve(relativeResult);
}
/*
If there is a concrete directory with children, a given path will be returned twice: once as the marker blob
and once as the prefix for its children. We don't want to return the item twice, and we have no guarantees on
result ordering, so we have to maintain a cache of directory paths we've seen in order to de-dup.
*/
private boolean isNotDuplicate(Path path, BlobItem blob) {
/*
If the blob is not a prefix and the blob does not contain the directory metadata marker, it is a normal blob
and therefore will not be duplicated.
*/
if (!(blob.isPrefix() != null && blob.isPrefix())
&& !(blob.getMetadata() != null && blob.getMetadata().containsKey(AzureResource.DIR_METADATA_MARKER))) {
return true;
}
if (this.directoryPaths.contains(path.toString())) {
return false;
}
this.directoryPaths.add(path.toString());
return true;
}
} |
is there a reason we can't just do this +next 3 lines once in the constructor for AzureDirectoryIterator? | private Path getNextListResult(BlobItem blobItem) {
Path withoutRoot = this.path;
if (withoutRoot.isAbsolute()) {
withoutRoot = this.path.getRoot().relativize(this.path);
}
/*
Listing results return the full blob path, and we don't want to duplicate the path we listed off of, so
we relativize to remove it.
*/
String blobName = blobItem.getName();
Path relativeResult = withoutRoot.relativize(
this.path.getFileSystem().getPath(blobName));
return this.path.resolve(relativeResult);
} | Path withoutRoot = this.path; | private Path getNextListResult(BlobItem blobItem) {
/*
Listing results return the full blob path, and we don't want to duplicate the path we listed off of, so
we relativize to remove it.
*/
String blobName = blobItem.getName();
Path relativeResult = this.withoutRoot.relativize(
this.path.getFileSystem().getPath(blobName));
return this.path.resolve(relativeResult);
} | class AzureDirectoryIterator implements Iterator<Path> {
private final ClientLogger logger = new ClientLogger(AzureDirectoryIterator.class);
private final AzureDirectoryStream parentStream;
private final DirectoryStream.Filter<? super Path> filter;
private final Iterator<BlobItem> blobIterator;
private final AzurePath path;
private Path bufferedNext = null;
private final Set<String> directoryPaths;
AzureDirectoryIterator(AzureDirectoryStream parentStream, AzurePath path,
DirectoryStream.Filter<? super Path> filter) throws IOException {
this.parentStream = parentStream;
this.filter = filter;
this.path = path;
directoryPaths = new HashSet<>();
BlobContainerClient containerClient;
ListBlobsOptions listOptions = new ListBlobsOptions()
.setDetails(new BlobListDetails().setRetrieveMetadata(true));
if (path.isRoot()) {
String containerName = path.toString().substring(0, path.toString().length() - 1);
containerClient = ((AzureFileSystem) path.getFileSystem()).getBlobServiceClient()
.getBlobContainerClient(containerName);
} else {
AzureResource azureResource = new AzureResource(path);
listOptions.setPrefix(azureResource.getBlobClient().getBlobName() + AzureFileSystem.PATH_SEPARATOR);
containerClient = azureResource.getContainerClient();
}
this.blobIterator = containerClient
.listBlobsByHierarchy(AzureFileSystem.PATH_SEPARATOR, listOptions, null).iterator();
}
@Override
public boolean hasNext() {
if (parentStream.closed) {
return false;
}
if (this.bufferedNext != null) {
return true;
}
/*
Search for a new element that passes the filter and buffer it when found. If no such element is found,
return false.
*/
while (this.blobIterator.hasNext()) {
BlobItem nextBlob = this.blobIterator.next();
Path nextPath = getNextListResult(nextBlob);
try {
if (this.filter.accept(nextPath) && passesDirectoryDuplicateFilter(nextPath, nextBlob)) {
this.bufferedNext = nextPath;
return true;
}
} catch (IOException e) {
throw LoggingUtility.logError(logger, new DirectoryIteratorException(e));
}
}
return false;
}
@Override
public Path next() {
if (this.bufferedNext == null) {
if (!this.hasNext()) {
throw LoggingUtility.logError(logger, new NoSuchElementException());
}
}
Path next = this.bufferedNext;
this.bufferedNext = null;
return next;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
/*
If there is a concrete directory with children, a given path will be returned twice: once as the marker blob
and once as the prefix for its children. We don't want to return the item twice, and we have no guarantees on
result ordering, so we have to maintain a cache of directory paths we've seen in order to de-dup.
*/
private boolean passesDirectoryDuplicateFilter(Path path, BlobItem blob) {
/*
If the blob is not a prefix and the blob does not contain the directory metadata marker, it is a normal blob
and therefore will not be duplicated.
*/
if (!(blob.isPrefix() != null && blob.isPrefix())
&& !(blob.getMetadata() != null && blob.getMetadata().containsKey(AzureResource.DIR_METADATA_MARKER))) {
return true;
}
if (this.directoryPaths.contains(path.toString())) {
return false;
}
this.directoryPaths.add(path.toString());
return true;
}
} | class AzureDirectoryIterator implements Iterator<Path> {
private final ClientLogger logger = new ClientLogger(AzureDirectoryIterator.class);
private final AzureDirectoryStream parentStream;
private final DirectoryStream.Filter<? super Path> filter;
private final Iterator<BlobItem> blobIterator;
private final AzurePath path;
private final Path withoutRoot;
private Path bufferedNext = null;
private final Set<String> directoryPaths;
AzureDirectoryIterator(AzureDirectoryStream parentStream, AzurePath path,
DirectoryStream.Filter<? super Path> filter) throws IOException {
this.parentStream = parentStream;
this.filter = filter;
this.path = path;
/*
Resolving two paths requires that either both have a root or neither does. Because the paths returned from
listing will never have a root, we prepare a copy of the list path without a root for quick resolving later.
*/
Path root = this.path.getRoot();
this.withoutRoot = root == null ? this.path : root.relativize(this.path);
directoryPaths = new HashSet<>();
BlobContainerClient containerClient;
ListBlobsOptions listOptions = new ListBlobsOptions()
.setDetails(new BlobListDetails().setRetrieveMetadata(true));
if (path.isRoot()) {
String containerName = path.toString().substring(0, path.toString().length() - 1);
AzureFileSystem afs = ((AzureFileSystem) path.getFileSystem());
containerClient = ((AzureFileStore) afs.getFileStore(containerName)).getContainerClient();
} else {
AzureResource azureResource = new AzureResource(path);
listOptions.setPrefix(azureResource.getBlobClient().getBlobName() + AzureFileSystem.PATH_SEPARATOR);
containerClient = azureResource.getContainerClient();
}
this.blobIterator = containerClient
.listBlobsByHierarchy(AzureFileSystem.PATH_SEPARATOR, listOptions, null).iterator();
}
@Override
public boolean hasNext() {
if (parentStream.closed) {
return false;
}
if (this.bufferedNext != null) {
return true;
}
/*
Search for a new element that passes the filter and buffer it when found. If no such element is found,
return false.
*/
while (this.blobIterator.hasNext()) {
BlobItem nextBlob = this.blobIterator.next();
Path nextPath = getNextListResult(nextBlob);
try {
if (this.filter.accept(nextPath) && isNotDuplicate(nextPath, nextBlob)) {
this.bufferedNext = nextPath;
return true;
}
} catch (IOException e) {
throw LoggingUtility.logError(logger, new DirectoryIteratorException(e));
}
}
return false;
}
@Override
public Path next() {
if (this.bufferedNext == null) {
if (!this.hasNext()) {
throw LoggingUtility.logError(logger, new NoSuchElementException());
}
}
Path next = this.bufferedNext;
this.bufferedNext = null;
return next;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
/*
If there is a concrete directory with children, a given path will be returned twice: once as the marker blob
and once as the prefix for its children. We don't want to return the item twice, and we have no guarantees on
result ordering, so we have to maintain a cache of directory paths we've seen in order to de-dup.
*/
private boolean isNotDuplicate(Path path, BlobItem blob) {
/*
If the blob is not a prefix and the blob does not contain the directory metadata marker, it is a normal blob
and therefore will not be duplicated.
*/
if (!(blob.isPrefix() != null && blob.isPrefix())
&& !(blob.getMetadata() != null && blob.getMetadata().containsKey(AzureResource.DIR_METADATA_MARKER))) {
return true;
}
if (this.directoryPaths.contains(path.toString())) {
return false;
}
this.directoryPaths.add(path.toString());
return true;
}
} |
nit. should this be called !isDuplicate ? or something like that | public boolean hasNext() {
if (parentStream.closed) {
return false;
}
if (this.bufferedNext != null) {
return true;
}
/*
Search for a new element that passes the filter and buffer it when found. If no such element is found,
return false.
*/
while (this.blobIterator.hasNext()) {
BlobItem nextBlob = this.blobIterator.next();
Path nextPath = getNextListResult(nextBlob);
try {
if (this.filter.accept(nextPath) && passesDirectoryDuplicateFilter(nextPath, nextBlob)) {
this.bufferedNext = nextPath;
return true;
}
} catch (IOException e) {
throw LoggingUtility.logError(logger, new DirectoryIteratorException(e));
}
}
return false;
} | if (this.filter.accept(nextPath) && passesDirectoryDuplicateFilter(nextPath, nextBlob)) { | public boolean hasNext() {
if (parentStream.closed) {
return false;
}
if (this.bufferedNext != null) {
return true;
}
/*
Search for a new element that passes the filter and buffer it when found. If no such element is found,
return false.
*/
while (this.blobIterator.hasNext()) {
BlobItem nextBlob = this.blobIterator.next();
Path nextPath = getNextListResult(nextBlob);
try {
if (this.filter.accept(nextPath) && isNotDuplicate(nextPath, nextBlob)) {
this.bufferedNext = nextPath;
return true;
}
} catch (IOException e) {
throw LoggingUtility.logError(logger, new DirectoryIteratorException(e));
}
}
return false;
} | class AzureDirectoryIterator implements Iterator<Path> {
private final ClientLogger logger = new ClientLogger(AzureDirectoryIterator.class);
private final AzureDirectoryStream parentStream;
private final DirectoryStream.Filter<? super Path> filter;
private final Iterator<BlobItem> blobIterator;
private final AzurePath path;
private Path bufferedNext = null;
private final Set<String> directoryPaths;
/**
 * Creates an iterator over the immediate children of {@code path}.
 *
 * @param parentStream the owning stream; iteration stops once it is closed.
 * @param path the directory (or root/container) whose children are listed.
 * @param filter user-supplied filter applied to each candidate result.
 * @throws IOException if the path cannot be resolved to an Azure resource.
 */
AzureDirectoryIterator(AzureDirectoryStream parentStream, AzurePath path,
    DirectoryStream.Filter<? super Path> filter) throws IOException {
    this.parentStream = parentStream;
    this.filter = filter;
    this.path = path;
    // Tracks directory paths already returned so duplicates can be suppressed.
    directoryPaths = new HashSet<>();
    BlobContainerClient containerClient;
    // Retrieve metadata so the directory-marker key is visible to the duplicate filter.
    ListBlobsOptions listOptions = new ListBlobsOptions()
        .setDetails(new BlobListDetails().setRetrieveMetadata(true));
    if (path.isRoot()) {
        // A root path names a container; drop the trailing separator to get the container name.
        String containerName = path.toString().substring(0, path.toString().length() - 1);
        containerClient = ((AzureFileSystem) path.getFileSystem()).getBlobServiceClient()
            .getBlobContainerClient(containerName);
    } else {
        // Non-root: list only blobs under this directory's prefix within its container.
        AzureResource azureResource = new AzureResource(path);
        listOptions.setPrefix(azureResource.getBlobClient().getBlobName() + AzureFileSystem.PATH_SEPARATOR);
        containerClient = azureResource.getContainerClient();
    }
    // Hierarchical listing yields direct children only (virtual directories appear as prefixes).
    this.blobIterator = containerClient
        .listBlobsByHierarchy(AzureFileSystem.PATH_SEPARATOR, listOptions, null).iterator();
}
/**
 * Returns the element buffered by {@link #hasNext()}, then clears the buffer.
 *
 * @return the next {@link Path} in the listing.
 * @throws NoSuchElementException if no further element passes the filters.
 */
@Override // fix: the annotation appeared twice; @Override is not repeatable, so a duplicate is a compile error
public Path next() {
    if (this.bufferedNext == null) {
        // Delegate the search/buffering to hasNext().
        if (!this.hasNext()) {
            throw LoggingUtility.logError(logger, new NoSuchElementException());
        }
    }
    Path next = this.bufferedNext;
    this.bufferedNext = null;
    return next;
}

/**
 * Removal is not supported by this read-only iterator.
 *
 * @throws UnsupportedOperationException always.
 */
@Override
public void remove() {
    throw new UnsupportedOperationException();
}
/**
 * Maps a raw listing result onto a child of the path being listed. The service
 * returns full blob names, so the listing path (stripped of its root, if any)
 * is removed before re-anchoring the child on {@code this.path}.
 */
private Path getNextListResult(BlobItem blobItem) {
    Path base = this.path.isAbsolute()
        ? this.path.getRoot().relativize(this.path)
        : this.path;
    Path child = base.relativize(this.path.getFileSystem().getPath(blobItem.getName()));
    return this.path.resolve(child);
}
/*
A concrete directory with children is returned twice by the service: once as its marker blob and once as
the prefix for its children. There are no ordering guarantees, so a cache of directory paths already seen
is kept in order to de-dup.
*/
private boolean passesDirectoryDuplicateFilter(Path path, BlobItem blob) {
    boolean isPrefix = Boolean.TRUE.equals(blob.isPrefix());
    boolean hasDirMarker = blob.getMetadata() != null
        && blob.getMetadata().containsKey(AzureResource.DIR_METADATA_MARKER);
    // Plain blobs appear exactly once in a listing, so they always pass.
    if (!isPrefix && !hasDirMarker) {
        return true;
    }
    // Directories pass only on first sighting; Set.add reports whether the entry was new.
    return this.directoryPaths.add(path.toString());
}
} | class AzureDirectoryIterator implements Iterator<Path> {
private final ClientLogger logger = new ClientLogger(AzureDirectoryIterator.class);
private final AzureDirectoryStream parentStream;
private final DirectoryStream.Filter<? super Path> filter;
private final Iterator<BlobItem> blobIterator;
private final AzurePath path;
private final Path withoutRoot;
private Path bufferedNext = null;
private final Set<String> directoryPaths;
/**
 * Creates an iterator over the immediate children of {@code path}.
 *
 * @param parentStream the owning stream; iteration stops once it is closed.
 * @param path the directory (or root/container) whose children are listed.
 * @param filter user-supplied filter applied to each candidate result.
 * @throws IOException if the path cannot be resolved to an Azure resource.
 */
AzureDirectoryIterator(AzureDirectoryStream parentStream, AzurePath path,
    DirectoryStream.Filter<? super Path> filter) throws IOException {
    this.parentStream = parentStream;
    this.filter = filter;
    this.path = path;
    /*
    Resolving two paths requires that either both have a root or neither does. Because the paths returned from
    listing will never have a root, we prepare a copy of the list path without a root for quick resolving later.
    */
    Path root = this.path.getRoot();
    this.withoutRoot = root == null ? this.path : root.relativize(this.path);
    // Tracks directory paths already returned so duplicates can be suppressed.
    directoryPaths = new HashSet<>();
    BlobContainerClient containerClient;
    // Retrieve metadata so the directory-marker key is visible to the duplicate filter.
    ListBlobsOptions listOptions = new ListBlobsOptions()
        .setDetails(new BlobListDetails().setRetrieveMetadata(true));
    if (path.isRoot()) {
        // A root path names a container; drop the trailing separator to get the container name.
        String containerName = path.toString().substring(0, path.toString().length() - 1);
        AzureFileSystem afs = ((AzureFileSystem) path.getFileSystem());
        containerClient = ((AzureFileStore) afs.getFileStore(containerName)).getContainerClient();
    } else {
        // Non-root: list only blobs under this directory's prefix within its container.
        AzureResource azureResource = new AzureResource(path);
        listOptions.setPrefix(azureResource.getBlobClient().getBlobName() + AzureFileSystem.PATH_SEPARATOR);
        containerClient = azureResource.getContainerClient();
    }
    // Hierarchical listing yields direct children only (virtual directories appear as prefixes).
    this.blobIterator = containerClient
        .listBlobsByHierarchy(AzureFileSystem.PATH_SEPARATOR, listOptions, null).iterator();
}
/**
 * Returns the element buffered by {@link #hasNext()}, then clears the buffer.
 *
 * @return the next {@link Path} in the listing.
 * @throws NoSuchElementException if no further element passes the filters.
 */
@Override // fix: the annotation appeared twice; @Override is not repeatable, so a duplicate is a compile error
public Path next() {
    if (this.bufferedNext == null) {
        // Delegate the search/buffering to hasNext().
        if (!this.hasNext()) {
            throw LoggingUtility.logError(logger, new NoSuchElementException());
        }
    }
    Path next = this.bufferedNext;
    this.bufferedNext = null;
    return next;
}

/**
 * Removal is not supported by this read-only iterator.
 *
 * @throws UnsupportedOperationException always.
 */
@Override
public void remove() {
    throw new UnsupportedOperationException();
}
/**
 * Converts a raw listing result (a full blob name) into a child of the listed
 * path by stripping the pre-computed rootless listing prefix and re-anchoring
 * the remainder on {@code this.path}.
 */
private Path getNextListResult(BlobItem blobItem) {
    Path fullName = this.path.getFileSystem().getPath(blobItem.getName());
    return this.path.resolve(this.withoutRoot.relativize(fullName));
}
/*
A concrete directory with children is returned twice by the service: once as its marker blob and once as
the prefix for its children. There are no ordering guarantees, so a cache of directory paths already seen
is kept in order to de-dup.
*/
private boolean isNotDuplicate(Path path, BlobItem blob) {
    boolean isPrefix = Boolean.TRUE.equals(blob.isPrefix());
    boolean hasDirMarker = blob.getMetadata() != null
        && blob.getMetadata().containsKey(AzureResource.DIR_METADATA_MARKER);
    // Plain blobs appear exactly once in a listing, so they always pass.
    if (!isPrefix && !hasDirMarker) {
        return true;
    }
    // Directories pass only on first sighting; Set.add reports whether the entry was new.
    return this.directoryPaths.add(path.toString());
}
} |
Ooh, probably — that would save doing it every time. Good thought! | private Path getNextListResult(BlobItem blobItem) {
Path withoutRoot = this.path;
if (withoutRoot.isAbsolute()) {
withoutRoot = this.path.getRoot().relativize(this.path);
}
/*
Listing results return the full blob path, and we don't want to duplicate the path we listed off of, so
we relativize to remove it.
*/
String blobName = blobItem.getName();
Path relativeResult = withoutRoot.relativize(
this.path.getFileSystem().getPath(blobName));
return this.path.resolve(relativeResult);
} | Path withoutRoot = this.path; | private Path getNextListResult(BlobItem blobItem) {
/*
Listing results return the full blob path, and we don't want to duplicate the path we listed off of, so
we relativize to remove it.
*/
String blobName = blobItem.getName();
Path relativeResult = this.withoutRoot.relativize(
this.path.getFileSystem().getPath(blobName));
return this.path.resolve(relativeResult);
} | class AzureDirectoryIterator implements Iterator<Path> {
private final ClientLogger logger = new ClientLogger(AzureDirectoryIterator.class);
private final AzureDirectoryStream parentStream;
private final DirectoryStream.Filter<? super Path> filter;
private final Iterator<BlobItem> blobIterator;
private final AzurePath path;
private Path bufferedNext = null;
private final Set<String> directoryPaths;
/**
 * Creates an iterator over the immediate children of {@code path}.
 *
 * @param parentStream the owning stream; iteration stops once it is closed.
 * @param path the directory (or root/container) whose children are listed.
 * @param filter user-supplied filter applied to each candidate result.
 * @throws IOException if the path cannot be resolved to an Azure resource.
 */
AzureDirectoryIterator(AzureDirectoryStream parentStream, AzurePath path,
    DirectoryStream.Filter<? super Path> filter) throws IOException {
    this.parentStream = parentStream;
    this.filter = filter;
    this.path = path;
    // Tracks directory paths already returned so duplicates can be suppressed.
    directoryPaths = new HashSet<>();
    BlobContainerClient containerClient;
    // Retrieve metadata so the directory-marker key is visible to the duplicate filter.
    ListBlobsOptions listOptions = new ListBlobsOptions()
        .setDetails(new BlobListDetails().setRetrieveMetadata(true));
    if (path.isRoot()) {
        // A root path names a container; drop the trailing separator to get the container name.
        String containerName = path.toString().substring(0, path.toString().length() - 1);
        containerClient = ((AzureFileSystem) path.getFileSystem()).getBlobServiceClient()
            .getBlobContainerClient(containerName);
    } else {
        // Non-root: list only blobs under this directory's prefix within its container.
        AzureResource azureResource = new AzureResource(path);
        listOptions.setPrefix(azureResource.getBlobClient().getBlobName() + AzureFileSystem.PATH_SEPARATOR);
        containerClient = azureResource.getContainerClient();
    }
    // Hierarchical listing yields direct children only (virtual directories appear as prefixes).
    this.blobIterator = containerClient
        .listBlobsByHierarchy(AzureFileSystem.PATH_SEPARATOR, listOptions, null).iterator();
}
/**
 * Reports whether another listing result passes both the user filter and the
 * directory de-dup filter, buffering that result for {@link #next()}.
 */
@Override
public boolean hasNext() {
    if (parentStream.closed) {
        return false;
    }
    // A previous call already found and buffered the next element.
    if (this.bufferedNext != null) {
        return true;
    }
    // Advance until a candidate passes both filters; buffer it when found.
    while (this.blobIterator.hasNext()) {
        BlobItem candidate = this.blobIterator.next();
        Path candidatePath = getNextListResult(candidate);
        try {
            if (this.filter.accept(candidatePath)
                && passesDirectoryDuplicateFilter(candidatePath, candidate)) {
                this.bufferedNext = candidatePath;
                return true;
            }
        } catch (IOException e) {
            throw LoggingUtility.logError(logger, new DirectoryIteratorException(e));
        }
    }
    return false;
}
/**
 * Returns the element buffered by {@link #hasNext()}, then clears the buffer.
 *
 * @throws NoSuchElementException if no further element passes the filters.
 */
@Override
public Path next() {
    // Ensure an element is buffered; hasNext() performs the actual search.
    if (this.bufferedNext == null && !this.hasNext()) {
        throw LoggingUtility.logError(logger, new NoSuchElementException());
    }
    Path result = this.bufferedNext;
    this.bufferedNext = null;
    return result;
}

/**
 * Removal is not supported by this read-only iterator.
 */
@Override
public void remove() {
    throw new UnsupportedOperationException();
}
/*
A concrete directory with children is returned twice by the service: once as its marker blob and once as
the prefix for its children. There are no ordering guarantees, so a cache of directory paths already seen
is kept in order to de-dup.
*/
private boolean passesDirectoryDuplicateFilter(Path path, BlobItem blob) {
    boolean prefix = Boolean.TRUE.equals(blob.isPrefix());
    boolean directoryMarker = blob.getMetadata() != null
        && blob.getMetadata().containsKey(AzureResource.DIR_METADATA_MARKER);
    if (!prefix && !directoryMarker) {
        return true; // ordinary blob: can only appear once in a listing
    }
    // Directory entry: pass it through only the first time it is seen.
    return this.directoryPaths.add(path.toString());
}
} | class AzureDirectoryIterator implements Iterator<Path> {
private final ClientLogger logger = new ClientLogger(AzureDirectoryIterator.class);
private final AzureDirectoryStream parentStream;
private final DirectoryStream.Filter<? super Path> filter;
private final Iterator<BlobItem> blobIterator;
private final AzurePath path;
private final Path withoutRoot;
private Path bufferedNext = null;
private final Set<String> directoryPaths;
/**
 * Creates an iterator over the immediate children of {@code path}.
 *
 * @param parentStream the owning stream; iteration stops once it is closed.
 * @param path the directory (or root/container) whose children are listed.
 * @param filter user-supplied filter applied to each candidate result.
 * @throws IOException if the path cannot be resolved to an Azure resource.
 */
AzureDirectoryIterator(AzureDirectoryStream parentStream, AzurePath path,
    DirectoryStream.Filter<? super Path> filter) throws IOException {
    this.parentStream = parentStream;
    this.filter = filter;
    this.path = path;
    /*
    Resolving two paths requires that either both have a root or neither does. Because the paths returned from
    listing will never have a root, we prepare a copy of the list path without a root for quick resolving later.
    */
    Path root = this.path.getRoot();
    this.withoutRoot = root == null ? this.path : root.relativize(this.path);
    // Tracks directory paths already returned so duplicates can be suppressed.
    directoryPaths = new HashSet<>();
    BlobContainerClient containerClient;
    // Retrieve metadata so the directory-marker key is visible to the duplicate filter.
    ListBlobsOptions listOptions = new ListBlobsOptions()
        .setDetails(new BlobListDetails().setRetrieveMetadata(true));
    if (path.isRoot()) {
        // A root path names a container; drop the trailing separator to get the container name.
        String containerName = path.toString().substring(0, path.toString().length() - 1);
        AzureFileSystem afs = ((AzureFileSystem) path.getFileSystem());
        containerClient = ((AzureFileStore) afs.getFileStore(containerName)).getContainerClient();
    } else {
        // Non-root: list only blobs under this directory's prefix within its container.
        AzureResource azureResource = new AzureResource(path);
        listOptions.setPrefix(azureResource.getBlobClient().getBlobName() + AzureFileSystem.PATH_SEPARATOR);
        containerClient = azureResource.getContainerClient();
    }
    // Hierarchical listing yields direct children only (virtual directories appear as prefixes).
    this.blobIterator = containerClient
        .listBlobsByHierarchy(AzureFileSystem.PATH_SEPARATOR, listOptions, null).iterator();
}
/**
 * Reports whether another listing result passes both the user filter and the
 * duplicate filter, buffering that result for {@link #next()}.
 */
@Override
public boolean hasNext() {
    if (parentStream.closed) {
        return false;
    }
    // A previous call already found and buffered the next element.
    if (this.bufferedNext != null) {
        return true;
    }
    // Advance until a candidate passes both filters; buffer it when found.
    while (this.blobIterator.hasNext()) {
        BlobItem candidate = this.blobIterator.next();
        Path candidatePath = getNextListResult(candidate);
        try {
            if (this.filter.accept(candidatePath) && isNotDuplicate(candidatePath, candidate)) {
                this.bufferedNext = candidatePath;
                return true;
            }
        } catch (IOException e) {
            throw LoggingUtility.logError(logger, new DirectoryIteratorException(e));
        }
    }
    return false;
}
/**
 * Returns the element buffered by {@link #hasNext()}, then clears the buffer.
 *
 * @throws NoSuchElementException if no further element passes the filters.
 */
@Override
public Path next() {
    // Ensure an element is buffered; hasNext() performs the actual search.
    if (this.bufferedNext == null && !this.hasNext()) {
        throw LoggingUtility.logError(logger, new NoSuchElementException());
    }
    Path result = this.bufferedNext;
    this.bufferedNext = null;
    return result;
}

/**
 * Removal is not supported by this read-only iterator.
 */
@Override
public void remove() {
    throw new UnsupportedOperationException();
}
/*
A concrete directory with children is returned twice by the service: once as its marker blob and once as
the prefix for its children. There are no ordering guarantees, so a cache of directory paths already seen
is kept in order to de-dup.
*/
private boolean isNotDuplicate(Path path, BlobItem blob) {
    boolean prefix = Boolean.TRUE.equals(blob.isPrefix());
    boolean directoryMarker = blob.getMetadata() != null
        && blob.getMetadata().containsKey(AzureResource.DIR_METADATA_MARKER);
    if (!prefix && !directoryMarker) {
        return true; // ordinary blob: can only appear once in a listing
    }
    // Directory entry: pass it through only the first time it is seen.
    return this.directoryPaths.add(path.toString());
}
} |
Not particularly picky about this, but since we're now throwing on finding the decryption policy, could we just throw the exception in the for loop when we encounter it? That way we don't need to loop over all the rest of the policies if the first policy was a decryption policy. | private HttpPipeline getHttpPipeline() {
if (httpPipeline != null) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
boolean decryptionPolicyPresent = false;
for (int i = 0; i < httpPipeline.getPolicyCount(); i++) {
HttpPipelinePolicy currPolicy = httpPipeline.getPolicy(i);
decryptionPolicyPresent |= currPolicy instanceof BlobDecryptionPolicy;
policies.add(currPolicy);
}
if (!decryptionPolicyPresent) {
policies.add(0, new BlobDecryptionPolicy(keyWrapper, keyResolver));
} else {
throw new IllegalArgumentException("The passed pipeline was already configured for "
+ "encryption/decryption in a way that might conflict with the passed key information. Please "
+ "ensure that the passed pipeline is not already configured for encryption/decryption");
}
return new HttpPipelineBuilder()
.httpClient(httpPipeline.getHttpClient())
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
}
Configuration userAgentConfiguration = (configuration == null) ? Configuration.NONE : configuration;
List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new BlobDecryptionPolicy(keyWrapper, keyResolver));
String clientName = USER_AGENT_PROPERTIES.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = USER_AGENT_PROPERTIES.getOrDefault(SDK_VERSION, "UnknownVersion");
policies.add(new UserAgentPolicy(logOptions.getApplicationId(), clientName, clientVersion,
userAgentConfiguration));
policies.add(new RequestIdPolicy());
policies.add(new AddDatePolicy());
if (storageSharedKeyCredential != null) {
policies.add(new StorageSharedKeyCredentialPolicy(storageSharedKeyCredential));
} else if (tokenCredential != null) {
BuilderHelper.httpsValidation(tokenCredential, "bearer token", endpoint, logger);
policies.add(new BearerTokenAuthenticationPolicy(tokenCredential, String.format("%s/.default", endpoint)));
} else if (sasTokenCredential != null) {
policies.add(new SasTokenCredentialPolicy(sasTokenCredential));
}
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(new RequestRetryPolicy(retryOptions));
policies.addAll(additionalPolicies);
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new ResponseValidationPolicyBuilder()
.addOptionalEcho(Constants.HeaderConstants.CLIENT_REQUEST_ID)
.addOptionalEcho(Constants.HeaderConstants.ENCRYPTION_KEY_SHA256)
.build());
policies.add(new HttpLoggingPolicy(logOptions));
policies.add(new ScrubEtagPolicy());
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.httpClient(httpClient)
.build();
} | if (!decryptionPolicyPresent) { | private HttpPipeline getHttpPipeline() {
if (httpPipeline != null) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
boolean decryptionPolicyPresent = false;
for (int i = 0; i < httpPipeline.getPolicyCount(); i++) {
HttpPipelinePolicy currPolicy = httpPipeline.getPolicy(i);
if (currPolicy instanceof BlobDecryptionPolicy) {
throw logger.logExceptionAsError(new IllegalArgumentException("The passed pipeline was already"
+ " configured for encryption/decryption in a way that might conflict with the passed key "
+ "information. Please ensure that the passed pipeline is not already configured for "
+ "encryption/decryption"));
}
policies.add(currPolicy);
}
policies.add(0, new BlobDecryptionPolicy(keyWrapper, keyResolver));
return new HttpPipelineBuilder()
.httpClient(httpPipeline.getHttpClient())
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
}
Configuration userAgentConfiguration = (configuration == null) ? Configuration.NONE : configuration;
List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new BlobDecryptionPolicy(keyWrapper, keyResolver));
String clientName = USER_AGENT_PROPERTIES.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = USER_AGENT_PROPERTIES.getOrDefault(SDK_VERSION, "UnknownVersion");
policies.add(new UserAgentPolicy(logOptions.getApplicationId(), clientName, clientVersion,
userAgentConfiguration));
policies.add(new RequestIdPolicy());
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(new RequestRetryPolicy(retryOptions));
policies.add(new AddDatePolicy());
if (storageSharedKeyCredential != null) {
policies.add(new StorageSharedKeyCredentialPolicy(storageSharedKeyCredential));
} else if (tokenCredential != null) {
BuilderHelper.httpsValidation(tokenCredential, "bearer token", endpoint, logger);
policies.add(new BearerTokenAuthenticationPolicy(tokenCredential, String.format("%s/.default", endpoint)));
} else if (sasTokenCredential != null) {
policies.add(new SasTokenCredentialPolicy(sasTokenCredential));
}
policies.addAll(additionalPolicies);
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new ResponseValidationPolicyBuilder()
.addOptionalEcho(Constants.HeaderConstants.CLIENT_REQUEST_ID)
.addOptionalEcho(Constants.HeaderConstants.ENCRYPTION_KEY_SHA256)
.build());
policies.add(new HttpLoggingPolicy(logOptions));
policies.add(new ScrubEtagPolicy());
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.httpClient(httpClient)
.build();
} | class EncryptedBlobClientBuilder {
// Logger used to surface builder validation errors.
private final ClientLogger logger = new ClientLogger(EncryptedBlobClientBuilder.class);
// Property keys used to look up the client name/version for the User-Agent header.
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
// Target location: account endpoint plus optional container/blob/snapshot parts.
private String endpoint;
private String accountName;
private String containerName;
private String blobName;
private String snapshot;
// Authorization: the credential setters keep at most one of these non-null at a time.
private StorageSharedKeyCredential storageSharedKeyCredential;
private TokenCredential tokenCredential;
private SasTokenCredential sasTokenCredential;
// HTTP plumbing; a fully-formed httpPipeline, when set, overrides the individual pieces.
private HttpClient httpClient;
private final List<HttpPipelinePolicy> additionalPolicies = new ArrayList<>();
private HttpLogOptions logOptions;
private RequestRetryOptions retryOptions = new RequestRetryOptions();
private HttpPipeline httpPipeline;
private Configuration configuration;
// Client-side encryption configuration (key for wrapping, resolver for unwrapping).
private AsyncKeyEncryptionKey keyWrapper;
private AsyncKeyEncryptionKeyResolver keyResolver;
private String keyWrapAlgorithm;
private BlobServiceVersion version;
private CpkInfo customerProvidedKey;
/**
 * Creates a new instance of the EncryptedBlobClientBuilder
 */
public EncryptedBlobClientBuilder() {
    logOptions = getDefaultHttpLogOptions();
}
/**
 * Creates a {@link EncryptedBlobClient} based on options set in the Builder.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClientBuilder.buildEncryptedBlobClient}
 *
 * @return a {@link EncryptedBlobClient} created from the configurations in this builder.
 * @throws NullPointerException If {@code endpoint}, {@code containerName}, or {@code blobName} is {@code null}.
 */
public EncryptedBlobClient buildEncryptedBlobClient() {
    // The sync client is a thin wrapper over the async client.
    // NOTE(review): the @codesnippet above previously referenced the *async* build sample;
    // it now matches this method — confirm the snippet id exists.
    return new EncryptedBlobClient(buildEncryptedBlobAsyncClient());
}
/**
 * Creates a {@link EncryptedBlobAsyncClient} based on options set in the Builder.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClientBuilder.buildEncryptedBlobAsyncClient}
 *
 * @return a {@link EncryptedBlobAsyncClient} created from the configurations in this builder.
 * @throws NullPointerException If {@code endpoint}, {@code containerName}, or {@code blobName} is {@code null}.
 */
public EncryptedBlobAsyncClient buildEncryptedBlobAsyncClient() {
    // NOTE(review): the @codesnippet above previously referenced the *sync* build sample;
    // it now matches this method — confirm the snippet id exists.
    Objects.requireNonNull(blobName, "'blobName' cannot be null.");
    checkValidEncryptionParameters();
    /*
    Implicit and explicit root container access are functionally equivalent, but explicit references are easier
    to read and debug.
    */
    if (CoreUtils.isNullOrEmpty(containerName)) {
        containerName = BlobContainerAsyncClient.ROOT_CONTAINER_NAME;
    }
    // Default to the latest known service version when none was specified.
    BlobServiceVersion serviceVersion = version != null ? version : BlobServiceVersion.getLatest();
    return new EncryptedBlobAsyncClient(getHttpPipeline(),
        String.format("%s/%s/%s", endpoint, containerName, blobName), serviceVersion, accountName, containerName,
        blobName, snapshot, customerProvidedKey, keyWrapper, keyWrapAlgorithm);
}
/**
 * Sets the encryption key parameters for the client
 *
 * @param key An object of type {@link AsyncKeyEncryptionKey} that is used to wrap/unwrap the content encryption key
 * @param keyWrapAlgorithm The {@link String} used to wrap the key.
 * @return the updated EncryptedBlobClientBuilder object
 */
public EncryptedBlobClientBuilder key(AsyncKeyEncryptionKey key, String keyWrapAlgorithm) {
    this.keyWrapper = key;
    this.keyWrapAlgorithm = keyWrapAlgorithm;
    return this;
}
/**
 * Sets the encryption parameters for this client
 *
 * @param keyResolver The key resolver used to select the correct key for decrypting existing blobs.
 * @return the updated EncryptedBlobClientBuilder object
 */
public EncryptedBlobClientBuilder keyResolver(AsyncKeyEncryptionKeyResolver keyResolver) {
    this.keyResolver = keyResolver;
    return this;
}
// Validates the encryption configuration before a client is built: at least one of
// key/keyResolver must be set, and a key always requires a wrap algorithm.
private void checkValidEncryptionParameters() {
    if (this.keyWrapper == null && this.keyResolver == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("Key and KeyResolver cannot both be null"));
    }
    if (this.keyWrapper != null && this.keyWrapAlgorithm == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Key Wrap Algorithm must be specified with a Key."));
    }
}
/**
 * Sets the {@link StorageSharedKeyCredential} used to authorize requests sent to the service.
 * The credential kinds are mutually exclusive: setting one clears the others.
 *
 * @param credential {@link StorageSharedKeyCredential}.
 * @return the updated EncryptedBlobClientBuilder
 * @throws NullPointerException If {@code credential} is {@code null}.
 */
public EncryptedBlobClientBuilder credential(StorageSharedKeyCredential credential) {
    this.storageSharedKeyCredential = Objects.requireNonNull(credential, "'credential' cannot be null.");
    // Only one credential kind may be active at a time.
    this.tokenCredential = null;
    this.sasTokenCredential = null;
    return this;
}
/**
 * Sets the {@link TokenCredential} used to authorize requests sent to the service.
 * The credential kinds are mutually exclusive: setting one clears the others.
 *
 * @param credential {@link TokenCredential}.
 * @return the updated EncryptedBlobClientBuilder
 * @throws NullPointerException If {@code credential} is {@code null}.
 */
public EncryptedBlobClientBuilder credential(TokenCredential credential) {
    this.tokenCredential = Objects.requireNonNull(credential, "'credential' cannot be null.");
    // Only one credential kind may be active at a time.
    this.storageSharedKeyCredential = null;
    this.sasTokenCredential = null;
    return this;
}
/**
 * Sets the SAS token used to authorize requests sent to the service.
 * The credential kinds are mutually exclusive: setting one clears the others.
 *
 * @param sasToken The SAS token to use for authenticating requests.
 * @return the updated EncryptedBlobClientBuilder
 * @throws NullPointerException If {@code sasToken} is {@code null}.
 */
public EncryptedBlobClientBuilder sasToken(String sasToken) {
    this.sasTokenCredential = new SasTokenCredential(Objects.requireNonNull(sasToken,
        "'sasToken' cannot be null."));
    // Only one credential kind may be active at a time.
    this.storageSharedKeyCredential = null;
    this.tokenCredential = null;
    return this;
}
/**
 * Clears the credential used to authorize the request.
 *
 * <p>This is for blobs that are publicly accessible.</p>
 *
 * @return the updated EncryptedBlobClientBuilder
 */
public EncryptedBlobClientBuilder setAnonymousAccess() {
    this.storageSharedKeyCredential = null;
    this.tokenCredential = null;
    this.sasTokenCredential = null;
    return this;
}
/**
 * Sets the connection string to connect to the service. Parses out the blob endpoint, account name,
 * and whichever credential (account key or SAS token) the string carries.
 *
 * @param connectionString Connection string of the storage account.
 * @return the updated EncryptedBlobClientBuilder
 * @throws IllegalArgumentException If {@code connectionString} is invalid.
 */
public EncryptedBlobClientBuilder connectionString(String connectionString) {
    StorageConnectionString storageConnectionString
        = StorageConnectionString.create(connectionString, logger);
    StorageEndpoint endpoint = storageConnectionString.getBlobEndpoint();
    // The connection string must yield a usable blob endpoint.
    if (endpoint == null || endpoint.getPrimaryUri() == null) {
        throw logger
            .logExceptionAsError(new IllegalArgumentException(
                "connectionString missing required settings to derive blob service endpoint."));
    }
    this.endpoint(endpoint.getPrimaryUri());
    if (storageConnectionString.getAccountName() != null) {
        this.accountName = storageConnectionString.getAccountName();
    }
    // Install whichever credential the connection string provides.
    StorageAuthenticationSettings authSettings = storageConnectionString.getStorageAuthSettings();
    if (authSettings.getType() == StorageAuthenticationSettings.Type.ACCOUNT_NAME_KEY) {
        this.credential(new StorageSharedKeyCredential(authSettings.getAccount().getName(),
            authSettings.getAccount().getAccessKey()));
    } else if (authSettings.getType() == StorageAuthenticationSettings.Type.SAS_TOKEN) {
        this.sasToken(authSettings.getSasToken());
    }
    return this;
}
/**
 * Sets the service endpoint, additionally parses it for information (SAS token, container name, blob name)
 *
 * <p>If the endpoint is to a blob in the root container, this method will fail as it will interpret the blob name
 * as the container name. With only one path element, it is impossible to distinguish between a container name and a
 * blob in the root container, so it is assumed to be the container name as this is much more common. When working
 * with blobs in the root container, it is best to set the endpoint to the account url and specify the blob name
 * separately using the {@link EncryptedBlobClientBuilder#blobName(String) blobName} setter.
 *
 * @param endpoint URL of the service
 * @return the updated EncryptedBlobClientBuilder object
 * @throws IllegalArgumentException If {@code endpoint} is {@code null} or is a malformed URL.
 */
public EncryptedBlobClientBuilder endpoint(String endpoint) {
    try {
        URL url = new URL(endpoint);
        // Split the URL into account/container/blob/snapshot/SAS components.
        BlobUrlParts parts = BlobUrlParts.parse(url);
        this.accountName = parts.getAccountName();
        this.endpoint = BuilderHelper.getEndpoint(parts);
        this.containerName = parts.getBlobContainerName();
        this.blobName = Utility.urlEncode(parts.getBlobName());
        this.snapshot = parts.getSnapshot();
        // If the URL embeds a SAS token, adopt it as the credential.
        String sasToken = parts.getCommonSasQueryParameters().encode();
        if (!CoreUtils.isNullOrEmpty(sasToken)) {
            this.sasToken(sasToken);
        }
    } catch (MalformedURLException ex) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("The Azure Storage Blob endpoint url is malformed."));
    }
    return this;
}
/**
 * Sets the name of the container that contains the blob.
 *
 * @param containerName Name of the container. If the value {@code null} or empty the root container, {@code $root},
 * will be used.
 * @return the updated EncryptedBlobClientBuilder object
 */
public EncryptedBlobClientBuilder containerName(String containerName) {
    this.containerName = containerName;
    return this;
}
/**
 * Sets the name of the blob.
 *
 * @param blobName Name of the blob.
 * @return the updated EncryptedBlobClientBuilder object
 * @throws NullPointerException If {@code blobName} is {@code null}
 */
public EncryptedBlobClientBuilder blobName(String blobName) {
    // Decode-then-encode normalizes names that arrive either raw or already URL-encoded.
    this.blobName = Utility.urlEncode(Utility.urlDecode(Objects.requireNonNull(blobName,
        "'blobName' cannot be null.")));
    return this;
}
/**
 * Sets the snapshot identifier of the blob.
 *
 * @param snapshot Snapshot identifier for the blob.
 * @return the updated EncryptedBlobClientBuilder object
 */
public EncryptedBlobClientBuilder snapshot(String snapshot) {
    this.snapshot = snapshot;
    return this;
}
/**
 * Sets the {@link HttpClient} to use for sending and receiving requests to and from the service.
 *
 * @param httpClient HttpClient to use for requests.
 * @return the updated EncryptedBlobClientBuilder object
 */
public EncryptedBlobClientBuilder httpClient(HttpClient httpClient) {
    // Warn when a previously configured client is being cleared.
    if (this.httpClient != null && httpClient == null) {
        logger.info("'httpClient' is being set to 'null' when it was previously configured.");
    }
    this.httpClient = httpClient;
    return this;
}
/**
 * Adds a {@link HttpPipelinePolicy} to apply on each request sent. The policy will be added after the retry policy.
 * If the method is called multiple times, all policies will be added and their order preserved.
 *
 * @param pipelinePolicy a pipeline policy
 * @return the updated EncryptedBlobClientBuilder object
 * @throws NullPointerException If {@code pipelinePolicy} is {@code null}.
 */
public EncryptedBlobClientBuilder addPolicy(HttpPipelinePolicy pipelinePolicy) {
    this.additionalPolicies.add(Objects.requireNonNull(pipelinePolicy, "'pipelinePolicy' cannot be null"));
    return this;
}
/**
 * Sets the {@link HttpLogOptions} for service requests.
 *
 * @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
 * @return the updated EncryptedBlobClientBuilder object
 * @throws NullPointerException If {@code logOptions} is {@code null}.
 */
public EncryptedBlobClientBuilder httpLogOptions(HttpLogOptions logOptions) {
    this.logOptions = Objects.requireNonNull(logOptions, "'logOptions' cannot be null.");
    return this;
}
/**
 * Gets the default Storage whitelist log headers and query parameters.
 *
 * @return the default http log options.
 */
public static HttpLogOptions getDefaultHttpLogOptions() {
    return BuilderHelper.getDefaultHttpLogOptions();
}
/**
 * Sets the configuration object used to retrieve environment configuration values during building of the client.
 *
 * @param configuration Configuration store used to retrieve environment configurations.
 * @return the updated EncryptedBlobClientBuilder object
 */
public EncryptedBlobClientBuilder configuration(Configuration configuration) {
    this.configuration = configuration;
    return this;
}
/**
 * Sets the request retry options for all the requests made through the client.
 *
 * @param retryOptions {@link RequestRetryOptions}.
 * @return the updated EncryptedBlobClientBuilder object
 * @throws NullPointerException If {@code retryOptions} is {@code null}.
 */
public EncryptedBlobClientBuilder retryOptions(RequestRetryOptions retryOptions) {
    this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null.");
    return this;
}
/**
 * Sets the {@link HttpPipeline} to use for the service client, and adds a decryption policy if one is not present.
 * Note that the underlying pipeline should not already be configured for encryption/decryption.
 * <p>
 * If {@code pipeline} is set, all other settings are ignored, aside from
 * {@link EncryptedBlobClientBuilder#endpoint(String) endpoint} and
 * {@link EncryptedBlobClientBuilder#customerProvidedKey(CustomerProvidedKey) customerProvidedKey}.
 * (NOTE(review): the original {@code @link} targets were truncated in this source — confirm these are the
 * intended references.)
 *
 * @param httpPipeline HttpPipeline to use for sending service requests and receiving responses.
 * @return the updated EncryptedBlobClientBuilder object
 */
public EncryptedBlobClientBuilder pipeline(HttpPipeline httpPipeline) {
    // Warn when a previously configured pipeline is being cleared.
    if (this.httpPipeline != null && httpPipeline == null) {
        logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
    }
    this.httpPipeline = httpPipeline;
    return this;
}
/**
* Sets the {@link BlobServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version of the client library will have the result of potentially moving to a newer service version.
* <p>
* Targeting a specific service version may also mean that the service will return an error for newer APIs.
*
* @param version {@link BlobServiceVersion} of the service to be used when making requests.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder serviceVersion(BlobServiceVersion version) {
this.version = version;
return this;
}
/**
* Sets the {@link CustomerProvidedKey customer provided key} that is used to encrypt blob contents on the server.
*
* @param customerProvidedKey {@link CustomerProvidedKey}
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder customerProvidedKey(CustomerProvidedKey customerProvidedKey) {
if (customerProvidedKey == null) {
this.customerProvidedKey = null;
} else {
this.customerProvidedKey = new CpkInfo()
.setEncryptionKey(customerProvidedKey.getKey())
.setEncryptionKeySha256(customerProvidedKey.getKeySha256())
.setEncryptionAlgorithm(customerProvidedKey.getEncryptionAlgorithm());
}
return this;
}
/**
* Configures the builder based on the passed {@link BlobClient}. This will set the {@link HttpPipeline},
* {@link URL} and {@link BlobServiceVersion} that are used to interact with the service. Note that the underlying
 * pipeline should not already be configured for encryption/decryption.
*
* <p>If {@code pipeline} is set, all other settings are ignored, aside from {@link
* {@link
*
* <p>Note that for security reasons, this method does not copy over the {@link CustomerProvidedKey} and
* encryption scope properties from the provided client. To set CPK, please use
* {@link
*
* @param blobClient BlobClient used to configure the builder.
* @return the updated EncryptedBlobClientBuilder object
* @throws NullPointerException If {@code containerClient} is {@code null}.
*/
public EncryptedBlobClientBuilder blobClient(BlobClient blobClient) {
Objects.requireNonNull(blobClient);
return client(blobClient.getHttpPipeline(), blobClient.getBlobUrl(), blobClient.getServiceVersion());
}
/**
* Configures the builder based on the passed {@link BlobAsyncClient}. This will set the {@link HttpPipeline},
* {@link URL} and {@link BlobServiceVersion} that are used to interact with the service. Note that the underlying
 * pipeline should not already be configured for encryption/decryption.
*
* <p>If {@code pipeline} is set, all other settings are ignored, aside from {@link
* {@link
*
* <p>Note that for security reasons, this method does not copy over the {@link CustomerProvidedKey} and
* encryption scope properties from the provided client. To set CPK, please use
* {@link
*
* @param blobAsyncClient BlobAsyncClient used to configure the builder.
* @return the updated EncryptedBlobClientBuilder object
* @throws NullPointerException If {@code containerClient} is {@code null}.
*/
public EncryptedBlobClientBuilder blobAsyncClient(BlobAsyncClient blobAsyncClient) {
Objects.requireNonNull(blobAsyncClient);
return client(blobAsyncClient.getHttpPipeline(), blobAsyncClient.getBlobUrl(),
blobAsyncClient.getServiceVersion());
}
/**
* Helper method to transform a regular client into an encrypted client
*
* @param httpPipeline {@link HttpPipeline}
* @param endpoint The endpoint.
* @param version {@link BlobServiceVersion} of the service to be used when making requests.
* @return the updated EncryptedBlobClientBuilder object
*/
private EncryptedBlobClientBuilder client(HttpPipeline httpPipeline, String endpoint, BlobServiceVersion version) {
this.endpoint(endpoint);
this.serviceVersion(version);
return this.pipeline(httpPipeline);
}
} | class EncryptedBlobClientBuilder {
private final ClientLogger logger = new ClientLogger(EncryptedBlobClientBuilder.class);
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private String endpoint;
private String accountName;
private String containerName;
private String blobName;
private String snapshot;
private StorageSharedKeyCredential storageSharedKeyCredential;
private TokenCredential tokenCredential;
private SasTokenCredential sasTokenCredential;
private HttpClient httpClient;
private final List<HttpPipelinePolicy> additionalPolicies = new ArrayList<>();
private HttpLogOptions logOptions;
private RequestRetryOptions retryOptions = new RequestRetryOptions();
private HttpPipeline httpPipeline;
private Configuration configuration;
private AsyncKeyEncryptionKey keyWrapper;
private AsyncKeyEncryptionKeyResolver keyResolver;
private String keyWrapAlgorithm;
private BlobServiceVersion version;
private CpkInfo customerProvidedKey;
/**
* Creates a new instance of the EncryptedBlobClientBuilder
*/
public EncryptedBlobClientBuilder() {
logOptions = getDefaultHttpLogOptions();
}
/**
* Creates a {@link EncryptedBlobClient} based on options set in the Builder.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClientBuilder.buildEncryptedBlobAsyncClient}
*
* @return a {@link EncryptedBlobClient} created from the configurations in this builder.
* @throws NullPointerException If {@code endpoint}, {@code containerName}, or {@code blobName} is {@code null}.
*/
public EncryptedBlobClient buildEncryptedBlobClient() {
return new EncryptedBlobClient(buildEncryptedBlobAsyncClient());
}
/**
* Creates a {@link EncryptedBlobAsyncClient} based on options set in the Builder.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClientBuilder.buildEncryptedBlobClient}
*
* @return a {@link EncryptedBlobAsyncClient} created from the configurations in this builder.
* @throws NullPointerException If {@code endpoint}, {@code containerName}, or {@code blobName} is {@code null}.
*/
    public EncryptedBlobAsyncClient buildEncryptedBlobAsyncClient() {
        // A blob name is mandatory; everything else has a workable default.
        Objects.requireNonNull(blobName, "'blobName' cannot be null.");
        // Fail fast if the key / keyResolver / keyWrapAlgorithm combination is unusable.
        checkValidEncryptionParameters();
        /*
        Implicit and explicit root container access are functionally equivalent, but explicit references are easier
        to read and debug.
        */
        if (CoreUtils.isNullOrEmpty(containerName)) {
            containerName = BlobContainerAsyncClient.ROOT_CONTAINER_NAME;
        }
        // Default to the latest known service version when the caller did not pin one.
        BlobServiceVersion serviceVersion = version != null ? version : BlobServiceVersion.getLatest();
        // The blob URL is assembled as endpoint/container/blob; snapshot and CPK travel as separate arguments.
        return new EncryptedBlobAsyncClient(getHttpPipeline(),
            String.format("%s/%s/%s", endpoint, containerName, blobName), serviceVersion, accountName, containerName,
            blobName, snapshot, customerProvidedKey, keyWrapper, keyWrapAlgorithm);
    }
/**
* Sets the encryption key parameters for the client
*
* @param key An object of type {@link AsyncKeyEncryptionKey} that is used to wrap/unwrap the content encryption key
* @param keyWrapAlgorithm The {@link String} used to wrap the key.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder key(AsyncKeyEncryptionKey key, String keyWrapAlgorithm) {
this.keyWrapper = key;
this.keyWrapAlgorithm = keyWrapAlgorithm;
return this;
}
/**
* Sets the encryption parameters for this client
*
* @param keyResolver The key resolver used to select the correct key for decrypting existing blobs.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder keyResolver(AsyncKeyEncryptionKeyResolver keyResolver) {
this.keyResolver = keyResolver;
return this;
}
private void checkValidEncryptionParameters() {
if (this.keyWrapper == null && this.keyResolver == null) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key and KeyResolver cannot both be null"));
}
if (this.keyWrapper != null && this.keyWrapAlgorithm == null) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key Wrap Algorithm must be specified with a Key."));
}
}
/**
* Sets the {@link StorageSharedKeyCredential} used to authorize requests sent to the service.
*
* @param credential {@link StorageSharedKeyCredential}.
* @return the updated EncryptedBlobClientBuilder
* @throws NullPointerException If {@code credential} is {@code null}.
*/
public EncryptedBlobClientBuilder credential(StorageSharedKeyCredential credential) {
this.storageSharedKeyCredential = Objects.requireNonNull(credential, "'credential' cannot be null.");
this.tokenCredential = null;
this.sasTokenCredential = null;
return this;
}
/**
* Sets the {@link TokenCredential} used to authorize requests sent to the service.
*
* @param credential {@link TokenCredential}.
* @return the updated EncryptedBlobClientBuilder
* @throws NullPointerException If {@code credential} is {@code null}.
*/
public EncryptedBlobClientBuilder credential(TokenCredential credential) {
this.tokenCredential = Objects.requireNonNull(credential, "'credential' cannot be null.");
this.storageSharedKeyCredential = null;
this.sasTokenCredential = null;
return this;
}
/**
* Sets the SAS token used to authorize requests sent to the service.
*
* @param sasToken The SAS token to use for authenticating requests.
* @return the updated EncryptedBlobClientBuilder
* @throws NullPointerException If {@code sasToken} is {@code null}.
*/
public EncryptedBlobClientBuilder sasToken(String sasToken) {
this.sasTokenCredential = new SasTokenCredential(Objects.requireNonNull(sasToken,
"'sasToken' cannot be null."));
this.storageSharedKeyCredential = null;
this.tokenCredential = null;
return this;
}
/**
* Clears the credential used to authorize the request.
*
* <p>This is for blobs that are publicly accessible.</p>
*
* @return the updated EncryptedBlobClientBuilder
*/
public EncryptedBlobClientBuilder setAnonymousAccess() {
this.storageSharedKeyCredential = null;
this.tokenCredential = null;
this.sasTokenCredential = null;
return this;
}
/**
* Sets the connection string to connect to the service.
*
* @param connectionString Connection string of the storage account.
* @return the updated EncryptedBlobClientBuilder
* @throws IllegalArgumentException If {@code connectionString} is invalid.
*/
public EncryptedBlobClientBuilder connectionString(String connectionString) {
StorageConnectionString storageConnectionString
= StorageConnectionString.create(connectionString, logger);
StorageEndpoint endpoint = storageConnectionString.getBlobEndpoint();
if (endpoint == null || endpoint.getPrimaryUri() == null) {
throw logger
.logExceptionAsError(new IllegalArgumentException(
"connectionString missing required settings to derive blob service endpoint."));
}
this.endpoint(endpoint.getPrimaryUri());
if (storageConnectionString.getAccountName() != null) {
this.accountName = storageConnectionString.getAccountName();
}
StorageAuthenticationSettings authSettings = storageConnectionString.getStorageAuthSettings();
if (authSettings.getType() == StorageAuthenticationSettings.Type.ACCOUNT_NAME_KEY) {
this.credential(new StorageSharedKeyCredential(authSettings.getAccount().getName(),
authSettings.getAccount().getAccessKey()));
} else if (authSettings.getType() == StorageAuthenticationSettings.Type.SAS_TOKEN) {
this.sasToken(authSettings.getSasToken());
}
return this;
}
/**
* Sets the service endpoint, additionally parses it for information (SAS token, container name, blob name)
*
* <p>If the endpoint is to a blob in the root container, this method will fail as it will interpret the blob name
* as the container name. With only one path element, it is impossible to distinguish between a container name and a
* blob in the root container, so it is assumed to be the container name as this is much more common. When working
* with blobs in the root container, it is best to set the endpoint to the account url and specify the blob name
* separately using the {@link EncryptedBlobClientBuilder
*
* @param endpoint URL of the service
* @return the updated EncryptedBlobClientBuilder object
* @throws IllegalArgumentException If {@code endpoint} is {@code null} or is a malformed URL.
*/
public EncryptedBlobClientBuilder endpoint(String endpoint) {
try {
URL url = new URL(endpoint);
BlobUrlParts parts = BlobUrlParts.parse(url);
this.accountName = parts.getAccountName();
this.endpoint = BuilderHelper.getEndpoint(parts);
this.containerName = parts.getBlobContainerName();
this.blobName = Utility.urlEncode(parts.getBlobName());
this.snapshot = parts.getSnapshot();
String sasToken = parts.getCommonSasQueryParameters().encode();
if (!CoreUtils.isNullOrEmpty(sasToken)) {
this.sasToken(sasToken);
}
} catch (MalformedURLException ex) {
throw logger.logExceptionAsError(
new IllegalArgumentException("The Azure Storage Blob endpoint url is malformed."));
}
return this;
}
/**
* Sets the name of the container that contains the blob.
*
* @param containerName Name of the container. If the value {@code null} or empty the root container, {@code $root},
* will be used.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder containerName(String containerName) {
this.containerName = containerName;
return this;
}
/**
* Sets the name of the blob.
*
* @param blobName Name of the blob.
* @return the updated EncryptedBlobClientBuilder object
* @throws NullPointerException If {@code blobName} is {@code null}
*/
public EncryptedBlobClientBuilder blobName(String blobName) {
this.blobName = Utility.urlEncode(Utility.urlDecode(Objects.requireNonNull(blobName,
"'blobName' cannot be null.")));
return this;
}
/**
* Sets the snapshot identifier of the blob.
*
* @param snapshot Snapshot identifier for the blob.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder snapshot(String snapshot) {
this.snapshot = snapshot;
return this;
}
/**
 * Sets the {@link HttpClient} to use for sending and receiving requests to and from the service.
*
* @param httpClient HttpClient to use for requests.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder httpClient(HttpClient httpClient) {
if (this.httpClient != null && httpClient == null) {
logger.info("'httpClient' is being set to 'null' when it was previously configured.");
}
this.httpClient = httpClient;
return this;
}
/**
* Adds a {@link HttpPipelinePolicy} to apply on each request sent. The policy will be added after the retry policy.
* If the method is called multiple times, all policies will be added and their order preserved.
*
* @param pipelinePolicy a pipeline policy
* @return the updated EncryptedBlobClientBuilder object
* @throws NullPointerException If {@code pipelinePolicy} is {@code null}.
*/
public EncryptedBlobClientBuilder addPolicy(HttpPipelinePolicy pipelinePolicy) {
this.additionalPolicies.add(Objects.requireNonNull(pipelinePolicy, "'pipelinePolicy' cannot be null"));
return this;
}
/**
* Sets the {@link HttpLogOptions} for service requests.
*
* @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return the updated EncryptedBlobClientBuilder object
* @throws NullPointerException If {@code logOptions} is {@code null}.
*/
public EncryptedBlobClientBuilder httpLogOptions(HttpLogOptions logOptions) {
this.logOptions = Objects.requireNonNull(logOptions, "'logOptions' cannot be null.");
return this;
}
/**
* Gets the default Storage whitelist log headers and query parameters.
*
* @return the default http log options.
*/
public static HttpLogOptions getDefaultHttpLogOptions() {
return BuilderHelper.getDefaultHttpLogOptions();
}
/**
* Sets the configuration object used to retrieve environment configuration values during building of the client.
*
* @param configuration Configuration store used to retrieve environment configurations.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets the request retry options for all the requests made through the client.
*
* @param retryOptions {@link RequestRetryOptions}.
* @return the updated EncryptedBlobClientBuilder object
* @throws NullPointerException If {@code retryOptions} is {@code null}.
*/
public EncryptedBlobClientBuilder retryOptions(RequestRetryOptions retryOptions) {
this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null.");
return this;
}
/**
* Sets the {@link HttpPipeline} to use for the service client, and adds a decryption policy if one is not present.
* Note that the underlying pipeline should not already be configured for encryption/decryption.
* <p>
* If {@code pipeline} is set, all other settings are ignored, aside from {@link
* and {@link
*
* @param httpPipeline HttpPipeline to use for sending service requests and receiving responses.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder pipeline(HttpPipeline httpPipeline) {
if (this.httpPipeline != null && httpPipeline == null) {
logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
}
this.httpPipeline = httpPipeline;
return this;
}
/**
* Sets the {@link BlobServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version of the client library will have the result of potentially moving to a newer service version.
* <p>
* Targeting a specific service version may also mean that the service will return an error for newer APIs.
*
* @param version {@link BlobServiceVersion} of the service to be used when making requests.
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder serviceVersion(BlobServiceVersion version) {
this.version = version;
return this;
}
/**
* Sets the {@link CustomerProvidedKey customer provided key} that is used to encrypt blob contents on the server.
*
* @param customerProvidedKey {@link CustomerProvidedKey}
* @return the updated EncryptedBlobClientBuilder object
*/
public EncryptedBlobClientBuilder customerProvidedKey(CustomerProvidedKey customerProvidedKey) {
if (customerProvidedKey == null) {
this.customerProvidedKey = null;
} else {
this.customerProvidedKey = new CpkInfo()
.setEncryptionKey(customerProvidedKey.getKey())
.setEncryptionKeySha256(customerProvidedKey.getKeySha256())
.setEncryptionAlgorithm(customerProvidedKey.getEncryptionAlgorithm());
}
return this;
}
/**
* Configures the builder based on the passed {@link BlobClient}. This will set the {@link HttpPipeline},
* {@link URL} and {@link BlobServiceVersion} that are used to interact with the service. Note that the underlying
* pipeline should not already be configured for encryption/decryption.
*
* <p>If {@code pipeline} is set, all other settings are ignored, aside from {@link
* {@link
*
* <p>Note that for security reasons, this method does not copy over the {@link CustomerProvidedKey} and
* encryption scope properties from the provided client. To set CPK, please use
* {@link
*
* @param blobClient BlobClient used to configure the builder.
* @return the updated EncryptedBlobClientBuilder object
* @throws NullPointerException If {@code containerClient} is {@code null}.
*/
public EncryptedBlobClientBuilder blobClient(BlobClient blobClient) {
Objects.requireNonNull(blobClient);
return client(blobClient.getHttpPipeline(), blobClient.getBlobUrl(), blobClient.getServiceVersion());
}
/**
* Configures the builder based on the passed {@link BlobAsyncClient}. This will set the {@link HttpPipeline},
* {@link URL} and {@link BlobServiceVersion} that are used to interact with the service. Note that the underlying
* pipeline should not already be configured for encryption/decryption.
*
* <p>If {@code pipeline} is set, all other settings are ignored, aside from {@link
* {@link
*
* <p>Note that for security reasons, this method does not copy over the {@link CustomerProvidedKey} and
* encryption scope properties from the provided client. To set CPK, please use
* {@link
*
* @param blobAsyncClient BlobAsyncClient used to configure the builder.
* @return the updated EncryptedBlobClientBuilder object
* @throws NullPointerException If {@code containerClient} is {@code null}.
*/
public EncryptedBlobClientBuilder blobAsyncClient(BlobAsyncClient blobAsyncClient) {
Objects.requireNonNull(blobAsyncClient);
return client(blobAsyncClient.getHttpPipeline(), blobAsyncClient.getBlobUrl(),
blobAsyncClient.getServiceVersion());
}
/**
* Helper method to transform a regular client into an encrypted client
*
* @param httpPipeline {@link HttpPipeline}
* @param endpoint The endpoint.
* @param version {@link BlobServiceVersion} of the service to be used when making requests.
* @return the updated EncryptedBlobClientBuilder object
*/
private EncryptedBlobClientBuilder client(HttpPipeline httpPipeline, String endpoint, BlobServiceVersion version) {
this.endpoint(endpoint);
this.serviceVersion(version);
return this.pipeline(httpPipeline);
}
} |
Should we use Collator.getInstance(Locale) ? Otherwise this will use local machine or JVM default which might not be what we expect. I guess en_US would be safe choice. | private String getAdditionalXmsHeaders(Map<String, String> headers) {
final List<String> xmsHeaderNameArray = headers.entrySet().stream()
.filter(entry -> entry.getKey().toLowerCase(Locale.ROOT).startsWith("x-ms-"))
.filter(entry -> entry.getValue() != null)
.map(Map.Entry::getKey)
.collect(Collectors.toList());
if (xmsHeaderNameArray.isEmpty()) {
return "";
}
/* Culture-sensitive word sort */
Collections.sort(xmsHeaderNameArray, Collator.getInstance());
final StringBuilder canonicalizedHeaders = new StringBuilder();
for (final String key : xmsHeaderNameArray) {
if (canonicalizedHeaders.length() > 0) {
canonicalizedHeaders.append('\n');
}
canonicalizedHeaders.append(key.toLowerCase(Locale.ROOT))
.append(':')
.append(headers.get(key));
}
return canonicalizedHeaders.toString();
} | Collections.sort(xmsHeaderNameArray, Collator.getInstance()); | private String getAdditionalXmsHeaders(Map<String, String> headers) {
final List<String> xmsHeaderNameArray = headers.entrySet().stream()
.filter(entry -> entry.getKey().toLowerCase(Locale.ROOT).startsWith("x-ms-"))
.filter(entry -> entry.getValue() != null)
.map(Map.Entry::getKey)
.collect(Collectors.toList());
if (xmsHeaderNameArray.isEmpty()) {
return "";
}
/* Culture-sensitive word sort */
Collections.sort(xmsHeaderNameArray, Collator.getInstance(Locale.ROOT));
final StringBuilder canonicalizedHeaders = new StringBuilder();
for (final String key : xmsHeaderNameArray) {
if (canonicalizedHeaders.length() > 0) {
canonicalizedHeaders.append('\n');
}
canonicalizedHeaders.append(key.toLowerCase(Locale.ROOT))
.append(':')
.append(headers.get(key));
}
return canonicalizedHeaders.toString();
} | class StorageSharedKeyCredential {
private static final String AUTHORIZATION_HEADER_FORMAT = "SharedKey %s:%s";
private static final String ACCOUNT_NAME = "accountname";
private static final String ACCOUNT_KEY = "accountkey";
private final String accountName;
private final String accountKey;
/**
* Initializes a new instance of StorageSharedKeyCredential contains an account's name and its primary or secondary
* accountKey.
*
* @param accountName The account name associated with the request.
* @param accountKey The account access key used to authenticate the request.
*/
public StorageSharedKeyCredential(String accountName, String accountKey) {
Objects.requireNonNull(accountName, "'accountName' cannot be null.");
Objects.requireNonNull(accountKey, "'accountKey' cannot be null.");
this.accountName = accountName;
this.accountKey = accountKey;
}
/**
* Creates a SharedKey credential from the passed connection string.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.common.StorageSharedKeyCredential.fromConnectionString
*
* @param connectionString Connection string used to build the SharedKey credential.
* @return a SharedKey credential if the connection string contains AccountName and AccountKey
* @throws IllegalArgumentException If {@code connectionString} doesn't have AccountName or AccountKey.
*/
public static StorageSharedKeyCredential fromConnectionString(String connectionString) {
HashMap<String, String> connectionStringPieces = new HashMap<>();
for (String connectionStringPiece : connectionString.split(";")) {
String[] kvp = connectionStringPiece.split("=", 2);
connectionStringPieces.put(kvp[0].toLowerCase(Locale.ROOT), kvp[1]);
}
String accountName = connectionStringPieces.get(ACCOUNT_NAME);
String accountKey = connectionStringPieces.get(ACCOUNT_KEY);
if (CoreUtils.isNullOrEmpty(accountName) || CoreUtils.isNullOrEmpty(accountKey)) {
throw new IllegalArgumentException("Connection string must contain 'AccountName' and 'AccountKey'.");
}
return new StorageSharedKeyCredential(accountName, accountKey);
}
/**
* Gets the account name associated with the request.
*
* @return The account name.
*/
public String getAccountName() {
return accountName;
}
/**
* Generates the SharedKey Authorization value from information in the request.
* @param requestURL URL of the request
* @param httpMethod HTTP method being used
* @param headers Headers on the request
* @return the SharedKey authorization value
*/
public String generateAuthorizationHeader(URL requestURL, String httpMethod, Map<String, String> headers) {
String signature = StorageImplUtils.computeHMac256(accountKey,
buildStringToSign(requestURL, httpMethod, headers));
return String.format(AUTHORIZATION_HEADER_FORMAT, accountName, signature);
}
/**
* Computes a signature for the specified string using the HMAC-SHA256 algorithm.
* Package-private because it is used to generate SAS signatures.
*
* @param stringToSign The UTF-8-encoded string to sign.
* @return A {@code String} that contains the HMAC-SHA256-encoded signature.
* @throws RuntimeException If the HMAC-SHA256 algorithm isn't support, if the key isn't a valid Base64 encoded
* string, or the UTF-8 charset isn't supported.
*/
public String computeHmac256(final String stringToSign) {
return StorageImplUtils.computeHMac256(accountKey, stringToSign);
}
private String buildStringToSign(URL requestURL, String httpMethod, Map<String, String> headers) {
String contentLength = headers.get("Content-Length");
contentLength = contentLength.equals("0") ? "" : contentLength;
String dateHeader = (headers.containsKey("x-ms-date")) ? ""
: getStandardHeaderValue(headers, "Date");
return String.join("\n",
httpMethod,
getStandardHeaderValue(headers, "Content-Encoding"),
getStandardHeaderValue(headers, "Content-Language"),
contentLength,
getStandardHeaderValue(headers, "Content-MD5"),
getStandardHeaderValue(headers, "Content-Type"),
dateHeader,
getStandardHeaderValue(headers, "If-Modified-Since"),
getStandardHeaderValue(headers, "If-Match"),
getStandardHeaderValue(headers, "If-None-Match"),
getStandardHeaderValue(headers, "If-Unmodified-Since"),
getStandardHeaderValue(headers, "Range"),
getAdditionalXmsHeaders(headers),
getCanonicalizedResource(requestURL));
}
/*
* Returns an empty string if the header value is null or empty.
*/
private String getStandardHeaderValue(Map<String, String> headers, String headerName) {
final String headerValue = headers.get(headerName);
return headerValue == null ? "" : headerValue;
}
    /**
     * Builds the canonicalized resource portion of the string-to-sign:
     * {@code /accountName/path} followed by each query parameter as
     * {@code \nlowercase-name:sorted,values}, with parameter names in natural sort order.
     */
    private String getCanonicalizedResource(URL requestURL) {
        final StringBuilder canonicalizedResource = new StringBuilder("/");
        canonicalizedResource.append(accountName);
        if (requestURL.getPath().length() > 0) {
            canonicalizedResource.append(requestURL.getPath());
        } else {
            // An empty path still contributes a single '/' to the canonical form.
            canonicalizedResource.append('/');
        }
        if (requestURL.getQuery() == null) {
            return canonicalizedResource.toString();
        }
        // Multi-valued parameters are grouped; values are sorted and comma-joined under one name.
        Map<String, String[]> queryParams = StorageImplUtils.parseQueryStringSplitValues(requestURL.getQuery());
        ArrayList<String> queryParamNames = new ArrayList<>(queryParams.keySet());
        Collections.sort(queryParamNames);
        for (String queryParamName : queryParamNames) {
            String[] queryParamValues = queryParams.get(queryParamName);
            Arrays.sort(queryParamValues);
            String queryParamValuesStr = String.join(",", queryParamValues);
            canonicalizedResource.append("\n")
                .append(queryParamName.toLowerCase(Locale.ROOT))
                .append(":")
                .append(queryParamValuesStr);
        }
        return canonicalizedResource.toString();
    }
/**
* Searches for a {@link StorageSharedKeyCredential} in the passed {@link HttpPipeline}.
*
* @param httpPipeline Pipeline being searched
* @return a StorageSharedKeyCredential if the pipeline contains one, otherwise null.
*/
public static StorageSharedKeyCredential getSharedKeyCredentialFromPipeline(HttpPipeline httpPipeline) {
for (int i = 0; i < httpPipeline.getPolicyCount(); i++) {
HttpPipelinePolicy httpPipelinePolicy = httpPipeline.getPolicy(i);
if (httpPipelinePolicy instanceof StorageSharedKeyCredentialPolicy) {
StorageSharedKeyCredentialPolicy storageSharedKeyCredentialPolicy =
(StorageSharedKeyCredentialPolicy) httpPipelinePolicy;
return storageSharedKeyCredentialPolicy.sharedKeyCredential();
}
}
return null;
}
} | class StorageSharedKeyCredential {
private static final String AUTHORIZATION_HEADER_FORMAT = "SharedKey %s:%s";
private static final String ACCOUNT_NAME = "accountname";
private static final String ACCOUNT_KEY = "accountkey";
private final String accountName;
private final String accountKey;
/**
* Initializes a new instance of StorageSharedKeyCredential contains an account's name and its primary or secondary
* accountKey.
*
* @param accountName The account name associated with the request.
* @param accountKey The account access key used to authenticate the request.
*/
public StorageSharedKeyCredential(String accountName, String accountKey) {
Objects.requireNonNull(accountName, "'accountName' cannot be null.");
Objects.requireNonNull(accountKey, "'accountKey' cannot be null.");
this.accountName = accountName;
this.accountKey = accountKey;
}
/**
* Creates a SharedKey credential from the passed connection string.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.common.StorageSharedKeyCredential.fromConnectionString
*
* @param connectionString Connection string used to build the SharedKey credential.
* @return a SharedKey credential if the connection string contains AccountName and AccountKey
* @throws IllegalArgumentException If {@code connectionString} doesn't have AccountName or AccountKey.
*/
public static StorageSharedKeyCredential fromConnectionString(String connectionString) {
HashMap<String, String> connectionStringPieces = new HashMap<>();
for (String connectionStringPiece : connectionString.split(";")) {
String[] kvp = connectionStringPiece.split("=", 2);
connectionStringPieces.put(kvp[0].toLowerCase(Locale.ROOT), kvp[1]);
}
String accountName = connectionStringPieces.get(ACCOUNT_NAME);
String accountKey = connectionStringPieces.get(ACCOUNT_KEY);
if (CoreUtils.isNullOrEmpty(accountName) || CoreUtils.isNullOrEmpty(accountKey)) {
throw new IllegalArgumentException("Connection string must contain 'AccountName' and 'AccountKey'.");
}
return new StorageSharedKeyCredential(accountName, accountKey);
}
/**
* Gets the account name associated with the request.
*
* @return The account name.
*/
public String getAccountName() {
return accountName;
}
/**
* Generates the SharedKey Authorization value from information in the request.
* @param requestURL URL of the request
* @param httpMethod HTTP method being used
* @param headers Headers on the request
* @return the SharedKey authorization value
*/
public String generateAuthorizationHeader(URL requestURL, String httpMethod, Map<String, String> headers) {
// HMAC-SHA256 the canonical string-to-sign with the account key...
String signature = StorageImplUtils.computeHMac256(accountKey,
buildStringToSign(requestURL, httpMethod, headers));
// ...and format it as "SharedKey <accountName>:<signature>" (AUTHORIZATION_HEADER_FORMAT).
return String.format(AUTHORIZATION_HEADER_FORMAT, accountName, signature);
}
/**
* Computes a signature for the specified string using the HMAC-SHA256 algorithm.
* Package-private because it is used to generate SAS signatures.
*
* @param stringToSign The UTF-8-encoded string to sign.
* @return A {@code String} that contains the HMAC-SHA256-encoded signature.
* @throws RuntimeException If the HMAC-SHA256 algorithm isn't support, if the key isn't a valid Base64 encoded
* string, or the UTF-8 charset isn't supported.
*/
public String computeHmac256(final String stringToSign) {
// Delegates to the shared HMAC-SHA256 implementation using this credential's account key.
return StorageImplUtils.computeHMac256(accountKey, stringToSign);
}
private String buildStringToSign(URL requestURL, String httpMethod, Map<String, String> headers) {
    // A Content-Length of 0 participates in the signature as ""; also guard the
    // null case (header absent), which previously threw a NullPointerException.
    String contentLength = headers.get("Content-Length");
    contentLength = (contentLength == null || contentLength.equals("0")) ? "" : contentLength;
    // When the request carries x-ms-date, the Date component must be empty.
    String dateHeader = (headers.containsKey("x-ms-date")) ? ""
        : getStandardHeaderValue(headers, "Date");
    // Components joined in the fixed order the Shared Key scheme expects;
    // absent standard headers contribute empty strings.
    return String.join("\n",
        httpMethod,
        getStandardHeaderValue(headers, "Content-Encoding"),
        getStandardHeaderValue(headers, "Content-Language"),
        contentLength,
        getStandardHeaderValue(headers, "Content-MD5"),
        getStandardHeaderValue(headers, "Content-Type"),
        dateHeader,
        getStandardHeaderValue(headers, "If-Modified-Since"),
        getStandardHeaderValue(headers, "If-Match"),
        getStandardHeaderValue(headers, "If-None-Match"),
        getStandardHeaderValue(headers, "If-Unmodified-Since"),
        getStandardHeaderValue(headers, "Range"),
        getAdditionalXmsHeaders(headers),
        getCanonicalizedResource(requestURL));
}
/*
* Returns an empty string if the header value is null or empty.
*/
/*
 * Looks up a header value, mapping an absent (null) entry to the empty string
 * so it can be spliced directly into the string-to-sign.
 */
private String getStandardHeaderValue(Map<String, String> headers, String headerName) {
    final String value = headers.get(headerName);
    if (value == null) {
        return "";
    }
    return value;
}
/*
 * Builds the canonicalized resource component of the string-to-sign:
 * "/<accountName><path or '/'>", followed (when a query string exists) by one
 * "\n<lowercased param name>:<comma-joined sorted values>" entry per query
 * parameter, with parameter names emitted in sorted order.
 */
private String getCanonicalizedResource(URL requestURL) {
final StringBuilder canonicalizedResource = new StringBuilder("/");
canonicalizedResource.append(accountName);
if (requestURL.getPath().length() > 0) {
canonicalizedResource.append(requestURL.getPath());
} else {
// An empty path canonicalizes to the root "/".
canonicalizedResource.append('/');
}
if (requestURL.getQuery() == null) {
return canonicalizedResource.toString();
}
// Each parameter may carry multiple values; both names and values are sorted
// so the canonical form is deterministic.
Map<String, String[]> queryParams = StorageImplUtils.parseQueryStringSplitValues(requestURL.getQuery());
ArrayList<String> queryParamNames = new ArrayList<>(queryParams.keySet());
Collections.sort(queryParamNames);
for (String queryParamName : queryParamNames) {
String[] queryParamValues = queryParams.get(queryParamName);
Arrays.sort(queryParamValues);
String queryParamValuesStr = String.join(",", queryParamValues);
canonicalizedResource.append("\n")
.append(queryParamName.toLowerCase(Locale.ROOT))
.append(":")
.append(queryParamValuesStr);
}
return canonicalizedResource.toString();
}
/**
* Searches for a {@link StorageSharedKeyCredential} in the passed {@link HttpPipeline}.
*
* @param httpPipeline Pipeline being searched
* @return a StorageSharedKeyCredential if the pipeline contains one, otherwise null.
*/
public static StorageSharedKeyCredential getSharedKeyCredentialFromPipeline(HttpPipeline httpPipeline) {
    // Walk the pipeline's policies in order; the first
    // StorageSharedKeyCredentialPolicy found supplies the credential.
    for (int index = 0; index < httpPipeline.getPolicyCount(); index++) {
        HttpPipelinePolicy candidate = httpPipeline.getPolicy(index);
        if (!(candidate instanceof StorageSharedKeyCredentialPolicy)) {
            continue;
        }
        return ((StorageSharedKeyCredentialPolicy) candidate).sharedKeyCredential();
    }
    // No shared-key policy present.
    return null;
}
} |
good point, yeah I wasn't too sure which one to pin it to if I did. There's also Locale.ROOT, Locale.US and Locale.ENGLISH | private String getAdditionalXmsHeaders(Map<String, String> headers) {
    // Gather every header whose name starts with "x-ms-" (case-insensitively)
    // and that carries a non-null value.
    final List<String> xmsHeaderNameArray = headers.entrySet().stream()
        .filter(entry -> entry.getKey().toLowerCase(Locale.ROOT).startsWith("x-ms-"))
        .filter(entry -> entry.getValue() != null)
        .map(Map.Entry::getKey)
        .collect(Collectors.toList());
    if (xmsHeaderNameArray.isEmpty()) {
        return "";
    }
    /* Culture-sensitive word sort, pinned to Locale.ROOT: Collator.getInstance()
       with no argument uses the JVM's default locale, so the canonicalized
       ordering — and therefore the request signature — could differ between
       machines. Pinning makes the signature deterministic. */
    Collections.sort(xmsHeaderNameArray, Collator.getInstance(Locale.ROOT));
    final StringBuilder canonicalizedHeaders = new StringBuilder();
    // Emit one "<lowercase-name>:<value>" line per header, '\n'-separated.
    for (final String key : xmsHeaderNameArray) {
        if (canonicalizedHeaders.length() > 0) {
            canonicalizedHeaders.append('\n');
        }
        canonicalizedHeaders.append(key.toLowerCase(Locale.ROOT))
            .append(':')
            .append(headers.get(key));
    }
    return canonicalizedHeaders.toString();
} | Collections.sort(xmsHeaderNameArray, Collator.getInstance()); | private String getAdditionalXmsHeaders(Map<String, String> headers) {
final List<String> xmsHeaderNameArray = headers.entrySet().stream()
.filter(entry -> entry.getKey().toLowerCase(Locale.ROOT).startsWith("x-ms-"))
.filter(entry -> entry.getValue() != null)
.map(Map.Entry::getKey)
.collect(Collectors.toList());
if (xmsHeaderNameArray.isEmpty()) {
return "";
}
/* Culture-sensitive word sort; pinned to Locale.ROOT so the header ordering
(and therefore the computed signature) is stable across JVM default locales. */
Collections.sort(xmsHeaderNameArray, Collator.getInstance(Locale.ROOT));
final StringBuilder canonicalizedHeaders = new StringBuilder();
// One "<lowercase-name>:<value>" line per header, separated by '\n'.
for (final String key : xmsHeaderNameArray) {
if (canonicalizedHeaders.length() > 0) {
canonicalizedHeaders.append('\n');
}
canonicalizedHeaders.append(key.toLowerCase(Locale.ROOT))
.append(':')
.append(headers.get(key));
}
return canonicalizedHeaders.toString();
} | class StorageSharedKeyCredential {
private static final String AUTHORIZATION_HEADER_FORMAT = "SharedKey %s:%s";
private static final String ACCOUNT_NAME = "accountname";
private static final String ACCOUNT_KEY = "accountkey";
private final String accountName;
private final String accountKey;
/**
* Initializes a new instance of StorageSharedKeyCredential contains an account's name and its primary or secondary
* accountKey.
*
* @param accountName The account name associated with the request.
* @param accountKey The account access key used to authenticate the request.
*/
public StorageSharedKeyCredential(String accountName, String accountKey) {
Objects.requireNonNull(accountName, "'accountName' cannot be null.");
Objects.requireNonNull(accountKey, "'accountKey' cannot be null.");
this.accountName = accountName;
this.accountKey = accountKey;
}
/**
* Creates a SharedKey credential from the passed connection string.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.common.StorageSharedKeyCredential.fromConnectionString
*
* @param connectionString Connection string used to build the SharedKey credential.
* @return a SharedKey credential if the connection string contains AccountName and AccountKey
* @throws IllegalArgumentException If {@code connectionString} doesn't have AccountName or AccountKey.
*/
public static StorageSharedKeyCredential fromConnectionString(String connectionString) {
    HashMap<String, String> connectionStringPieces = new HashMap<>();
    for (String connectionStringPiece : connectionString.split(";")) {
        // Split on the first '=' only, so account keys containing '=' padding survive intact.
        String[] kvp = connectionStringPiece.split("=", 2);
        // Skip malformed segments (e.g. the empty piece produced by ";;"), which
        // previously threw ArrayIndexOutOfBoundsException on kvp[1].
        if (kvp.length != 2) {
            continue;
        }
        connectionStringPieces.put(kvp[0].toLowerCase(Locale.ROOT), kvp[1]);
    }
    String accountName = connectionStringPieces.get(ACCOUNT_NAME);
    String accountKey = connectionStringPieces.get(ACCOUNT_KEY);
    // Missing or empty credentials surface as a single, descriptive error.
    if (CoreUtils.isNullOrEmpty(accountName) || CoreUtils.isNullOrEmpty(accountKey)) {
        throw new IllegalArgumentException("Connection string must contain 'AccountName' and 'AccountKey'.");
    }
    return new StorageSharedKeyCredential(accountName, accountKey);
}
/**
* Gets the account name associated with the request.
*
* @return The account name.
*/
public String getAccountName() {
return accountName;
}
/**
* Generates the SharedKey Authorization value from information in the request.
* @param requestURL URL of the request
* @param httpMethod HTTP method being used
* @param headers Headers on the request
* @return the SharedKey authorization value
*/
public String generateAuthorizationHeader(URL requestURL, String httpMethod, Map<String, String> headers) {
String signature = StorageImplUtils.computeHMac256(accountKey,
buildStringToSign(requestURL, httpMethod, headers));
return String.format(AUTHORIZATION_HEADER_FORMAT, accountName, signature);
}
/**
* Computes a signature for the specified string using the HMAC-SHA256 algorithm.
* Package-private because it is used to generate SAS signatures.
*
* @param stringToSign The UTF-8-encoded string to sign.
* @return A {@code String} that contains the HMAC-SHA256-encoded signature.
* @throws RuntimeException If the HMAC-SHA256 algorithm isn't support, if the key isn't a valid Base64 encoded
* string, or the UTF-8 charset isn't supported.
*/
public String computeHmac256(final String stringToSign) {
return StorageImplUtils.computeHMac256(accountKey, stringToSign);
}
private String buildStringToSign(URL requestURL, String httpMethod, Map<String, String> headers) {
    // A Content-Length of 0 participates in the signature as ""; also guard the
    // null case (header absent), which previously threw a NullPointerException.
    String contentLength = headers.get("Content-Length");
    contentLength = (contentLength == null || contentLength.equals("0")) ? "" : contentLength;
    // When the request carries x-ms-date, the Date component must be empty.
    String dateHeader = (headers.containsKey("x-ms-date")) ? ""
        : getStandardHeaderValue(headers, "Date");
    // Components joined in the fixed order the Shared Key scheme expects;
    // absent standard headers contribute empty strings.
    return String.join("\n",
        httpMethod,
        getStandardHeaderValue(headers, "Content-Encoding"),
        getStandardHeaderValue(headers, "Content-Language"),
        contentLength,
        getStandardHeaderValue(headers, "Content-MD5"),
        getStandardHeaderValue(headers, "Content-Type"),
        dateHeader,
        getStandardHeaderValue(headers, "If-Modified-Since"),
        getStandardHeaderValue(headers, "If-Match"),
        getStandardHeaderValue(headers, "If-None-Match"),
        getStandardHeaderValue(headers, "If-Unmodified-Since"),
        getStandardHeaderValue(headers, "Range"),
        getAdditionalXmsHeaders(headers),
        getCanonicalizedResource(requestURL));
}
/*
* Returns an empty string if the header value is null or empty.
*/
private String getStandardHeaderValue(Map<String, String> headers, String headerName) {
final String headerValue = headers.get(headerName);
return headerValue == null ? "" : headerValue;
}
private String getCanonicalizedResource(URL requestURL) {
final StringBuilder canonicalizedResource = new StringBuilder("/");
canonicalizedResource.append(accountName);
if (requestURL.getPath().length() > 0) {
canonicalizedResource.append(requestURL.getPath());
} else {
canonicalizedResource.append('/');
}
if (requestURL.getQuery() == null) {
return canonicalizedResource.toString();
}
Map<String, String[]> queryParams = StorageImplUtils.parseQueryStringSplitValues(requestURL.getQuery());
ArrayList<String> queryParamNames = new ArrayList<>(queryParams.keySet());
Collections.sort(queryParamNames);
for (String queryParamName : queryParamNames) {
String[] queryParamValues = queryParams.get(queryParamName);
Arrays.sort(queryParamValues);
String queryParamValuesStr = String.join(",", queryParamValues);
canonicalizedResource.append("\n")
.append(queryParamName.toLowerCase(Locale.ROOT))
.append(":")
.append(queryParamValuesStr);
}
return canonicalizedResource.toString();
}
/**
* Searches for a {@link StorageSharedKeyCredential} in the passed {@link HttpPipeline}.
*
* @param httpPipeline Pipeline being searched
* @return a StorageSharedKeyCredential if the pipeline contains one, otherwise null.
*/
public static StorageSharedKeyCredential getSharedKeyCredentialFromPipeline(HttpPipeline httpPipeline) {
for (int i = 0; i < httpPipeline.getPolicyCount(); i++) {
HttpPipelinePolicy httpPipelinePolicy = httpPipeline.getPolicy(i);
if (httpPipelinePolicy instanceof StorageSharedKeyCredentialPolicy) {
StorageSharedKeyCredentialPolicy storageSharedKeyCredentialPolicy =
(StorageSharedKeyCredentialPolicy) httpPipelinePolicy;
return storageSharedKeyCredentialPolicy.sharedKeyCredential();
}
}
return null;
}
} | class StorageSharedKeyCredential {
private static final String AUTHORIZATION_HEADER_FORMAT = "SharedKey %s:%s";
private static final String ACCOUNT_NAME = "accountname";
private static final String ACCOUNT_KEY = "accountkey";
private final String accountName;
private final String accountKey;
/**
* Initializes a new instance of StorageSharedKeyCredential contains an account's name and its primary or secondary
* accountKey.
*
* @param accountName The account name associated with the request.
* @param accountKey The account access key used to authenticate the request.
*/
public StorageSharedKeyCredential(String accountName, String accountKey) {
Objects.requireNonNull(accountName, "'accountName' cannot be null.");
Objects.requireNonNull(accountKey, "'accountKey' cannot be null.");
this.accountName = accountName;
this.accountKey = accountKey;
}
/**
* Creates a SharedKey credential from the passed connection string.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.common.StorageSharedKeyCredential.fromConnectionString
*
* @param connectionString Connection string used to build the SharedKey credential.
* @return a SharedKey credential if the connection string contains AccountName and AccountKey
* @throws IllegalArgumentException If {@code connectionString} doesn't have AccountName or AccountKey.
*/
public static StorageSharedKeyCredential fromConnectionString(String connectionString) {
    HashMap<String, String> connectionStringPieces = new HashMap<>();
    for (String connectionStringPiece : connectionString.split(";")) {
        // Split on the first '=' only, so account keys containing '=' padding survive intact.
        String[] kvp = connectionStringPiece.split("=", 2);
        // Skip malformed segments (e.g. the empty piece produced by ";;"), which
        // previously threw ArrayIndexOutOfBoundsException on kvp[1].
        if (kvp.length != 2) {
            continue;
        }
        connectionStringPieces.put(kvp[0].toLowerCase(Locale.ROOT), kvp[1]);
    }
    String accountName = connectionStringPieces.get(ACCOUNT_NAME);
    String accountKey = connectionStringPieces.get(ACCOUNT_KEY);
    // Missing or empty credentials surface as a single, descriptive error.
    if (CoreUtils.isNullOrEmpty(accountName) || CoreUtils.isNullOrEmpty(accountKey)) {
        throw new IllegalArgumentException("Connection string must contain 'AccountName' and 'AccountKey'.");
    }
    return new StorageSharedKeyCredential(accountName, accountKey);
}
/**
* Gets the account name associated with the request.
*
* @return The account name.
*/
public String getAccountName() {
return accountName;
}
/**
* Generates the SharedKey Authorization value from information in the request.
* @param requestURL URL of the request
* @param httpMethod HTTP method being used
* @param headers Headers on the request
* @return the SharedKey authorization value
*/
public String generateAuthorizationHeader(URL requestURL, String httpMethod, Map<String, String> headers) {
String signature = StorageImplUtils.computeHMac256(accountKey,
buildStringToSign(requestURL, httpMethod, headers));
return String.format(AUTHORIZATION_HEADER_FORMAT, accountName, signature);
}
/**
* Computes a signature for the specified string using the HMAC-SHA256 algorithm.
* Package-private because it is used to generate SAS signatures.
*
* @param stringToSign The UTF-8-encoded string to sign.
* @return A {@code String} that contains the HMAC-SHA256-encoded signature.
* @throws RuntimeException If the HMAC-SHA256 algorithm isn't support, if the key isn't a valid Base64 encoded
* string, or the UTF-8 charset isn't supported.
*/
public String computeHmac256(final String stringToSign) {
return StorageImplUtils.computeHMac256(accountKey, stringToSign);
}
private String buildStringToSign(URL requestURL, String httpMethod, Map<String, String> headers) {
    // A Content-Length of 0 participates in the signature as ""; also guard the
    // null case (header absent), which previously threw a NullPointerException.
    String contentLength = headers.get("Content-Length");
    contentLength = (contentLength == null || contentLength.equals("0")) ? "" : contentLength;
    // When the request carries x-ms-date, the Date component must be empty.
    String dateHeader = (headers.containsKey("x-ms-date")) ? ""
        : getStandardHeaderValue(headers, "Date");
    // Components joined in the fixed order the Shared Key scheme expects;
    // absent standard headers contribute empty strings.
    return String.join("\n",
        httpMethod,
        getStandardHeaderValue(headers, "Content-Encoding"),
        getStandardHeaderValue(headers, "Content-Language"),
        contentLength,
        getStandardHeaderValue(headers, "Content-MD5"),
        getStandardHeaderValue(headers, "Content-Type"),
        dateHeader,
        getStandardHeaderValue(headers, "If-Modified-Since"),
        getStandardHeaderValue(headers, "If-Match"),
        getStandardHeaderValue(headers, "If-None-Match"),
        getStandardHeaderValue(headers, "If-Unmodified-Since"),
        getStandardHeaderValue(headers, "Range"),
        getAdditionalXmsHeaders(headers),
        getCanonicalizedResource(requestURL));
}
/*
* Returns an empty string if the header value is null or empty.
*/
private String getStandardHeaderValue(Map<String, String> headers, String headerName) {
final String headerValue = headers.get(headerName);
return headerValue == null ? "" : headerValue;
}
private String getCanonicalizedResource(URL requestURL) {
final StringBuilder canonicalizedResource = new StringBuilder("/");
canonicalizedResource.append(accountName);
if (requestURL.getPath().length() > 0) {
canonicalizedResource.append(requestURL.getPath());
} else {
canonicalizedResource.append('/');
}
if (requestURL.getQuery() == null) {
return canonicalizedResource.toString();
}
Map<String, String[]> queryParams = StorageImplUtils.parseQueryStringSplitValues(requestURL.getQuery());
ArrayList<String> queryParamNames = new ArrayList<>(queryParams.keySet());
Collections.sort(queryParamNames);
for (String queryParamName : queryParamNames) {
String[] queryParamValues = queryParams.get(queryParamName);
Arrays.sort(queryParamValues);
String queryParamValuesStr = String.join(",", queryParamValues);
canonicalizedResource.append("\n")
.append(queryParamName.toLowerCase(Locale.ROOT))
.append(":")
.append(queryParamValuesStr);
}
return canonicalizedResource.toString();
}
/**
* Searches for a {@link StorageSharedKeyCredential} in the passed {@link HttpPipeline}.
*
* @param httpPipeline Pipeline being searched
* @return a StorageSharedKeyCredential if the pipeline contains one, otherwise null.
*/
public static StorageSharedKeyCredential getSharedKeyCredentialFromPipeline(HttpPipeline httpPipeline) {
for (int i = 0; i < httpPipeline.getPolicyCount(); i++) {
HttpPipelinePolicy httpPipelinePolicy = httpPipeline.getPolicy(i);
if (httpPipelinePolicy instanceof StorageSharedKeyCredentialPolicy) {
StorageSharedKeyCredentialPolicy storageSharedKeyCredentialPolicy =
(StorageSharedKeyCredentialPolicy) httpPipelinePolicy;
return storageSharedKeyCredentialPolicy.sharedKeyCredential();
}
}
return null;
}
} |
Javadoc of Locale.ROOT looks promising - I'd give that a try. If that doesn't work Locale.US. | private String getAdditionalXmsHeaders(Map<String, String> headers) {
    // Gather every header whose name starts with "x-ms-" (case-insensitively)
    // and that carries a non-null value.
    final List<String> xmsHeaderNameArray = headers.entrySet().stream()
        .filter(entry -> entry.getKey().toLowerCase(Locale.ROOT).startsWith("x-ms-"))
        .filter(entry -> entry.getValue() != null)
        .map(Map.Entry::getKey)
        .collect(Collectors.toList());
    if (xmsHeaderNameArray.isEmpty()) {
        return "";
    }
    /* Culture-sensitive word sort, pinned to Locale.ROOT: Collator.getInstance()
       with no argument uses the JVM's default locale, so the canonicalized
       ordering — and therefore the request signature — could differ between
       machines. Pinning makes the signature deterministic. */
    Collections.sort(xmsHeaderNameArray, Collator.getInstance(Locale.ROOT));
    final StringBuilder canonicalizedHeaders = new StringBuilder();
    // Emit one "<lowercase-name>:<value>" line per header, '\n'-separated.
    for (final String key : xmsHeaderNameArray) {
        if (canonicalizedHeaders.length() > 0) {
            canonicalizedHeaders.append('\n');
        }
        canonicalizedHeaders.append(key.toLowerCase(Locale.ROOT))
            .append(':')
            .append(headers.get(key));
    }
    return canonicalizedHeaders.toString();
} | Collections.sort(xmsHeaderNameArray, Collator.getInstance()); | private String getAdditionalXmsHeaders(Map<String, String> headers) {
final List<String> xmsHeaderNameArray = headers.entrySet().stream()
.filter(entry -> entry.getKey().toLowerCase(Locale.ROOT).startsWith("x-ms-"))
.filter(entry -> entry.getValue() != null)
.map(Map.Entry::getKey)
.collect(Collectors.toList());
if (xmsHeaderNameArray.isEmpty()) {
return "";
}
/* Culture-sensitive word sort; pinned to Locale.ROOT so the header ordering
(and therefore the computed signature) is stable across JVM default locales. */
Collections.sort(xmsHeaderNameArray, Collator.getInstance(Locale.ROOT));
final StringBuilder canonicalizedHeaders = new StringBuilder();
// One "<lowercase-name>:<value>" line per header, separated by '\n'.
for (final String key : xmsHeaderNameArray) {
if (canonicalizedHeaders.length() > 0) {
canonicalizedHeaders.append('\n');
}
canonicalizedHeaders.append(key.toLowerCase(Locale.ROOT))
.append(':')
.append(headers.get(key));
}
return canonicalizedHeaders.toString();
} | class StorageSharedKeyCredential {
private static final String AUTHORIZATION_HEADER_FORMAT = "SharedKey %s:%s";
private static final String ACCOUNT_NAME = "accountname";
private static final String ACCOUNT_KEY = "accountkey";
private final String accountName;
private final String accountKey;
/**
* Initializes a new instance of StorageSharedKeyCredential contains an account's name and its primary or secondary
* accountKey.
*
* @param accountName The account name associated with the request.
* @param accountKey The account access key used to authenticate the request.
*/
public StorageSharedKeyCredential(String accountName, String accountKey) {
Objects.requireNonNull(accountName, "'accountName' cannot be null.");
Objects.requireNonNull(accountKey, "'accountKey' cannot be null.");
this.accountName = accountName;
this.accountKey = accountKey;
}
/**
* Creates a SharedKey credential from the passed connection string.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.common.StorageSharedKeyCredential.fromConnectionString
*
* @param connectionString Connection string used to build the SharedKey credential.
* @return a SharedKey credential if the connection string contains AccountName and AccountKey
* @throws IllegalArgumentException If {@code connectionString} doesn't have AccountName or AccountKey.
*/
public static StorageSharedKeyCredential fromConnectionString(String connectionString) {
    HashMap<String, String> connectionStringPieces = new HashMap<>();
    for (String connectionStringPiece : connectionString.split(";")) {
        // Split on the first '=' only, so account keys containing '=' padding survive intact.
        String[] kvp = connectionStringPiece.split("=", 2);
        // Skip malformed segments (e.g. the empty piece produced by ";;"), which
        // previously threw ArrayIndexOutOfBoundsException on kvp[1].
        if (kvp.length != 2) {
            continue;
        }
        connectionStringPieces.put(kvp[0].toLowerCase(Locale.ROOT), kvp[1]);
    }
    String accountName = connectionStringPieces.get(ACCOUNT_NAME);
    String accountKey = connectionStringPieces.get(ACCOUNT_KEY);
    // Missing or empty credentials surface as a single, descriptive error.
    if (CoreUtils.isNullOrEmpty(accountName) || CoreUtils.isNullOrEmpty(accountKey)) {
        throw new IllegalArgumentException("Connection string must contain 'AccountName' and 'AccountKey'.");
    }
    return new StorageSharedKeyCredential(accountName, accountKey);
}
/**
* Gets the account name associated with the request.
*
* @return The account name.
*/
public String getAccountName() {
return accountName;
}
/**
* Generates the SharedKey Authorization value from information in the request.
* @param requestURL URL of the request
* @param httpMethod HTTP method being used
* @param headers Headers on the request
* @return the SharedKey authorization value
*/
public String generateAuthorizationHeader(URL requestURL, String httpMethod, Map<String, String> headers) {
String signature = StorageImplUtils.computeHMac256(accountKey,
buildStringToSign(requestURL, httpMethod, headers));
return String.format(AUTHORIZATION_HEADER_FORMAT, accountName, signature);
}
/**
* Computes a signature for the specified string using the HMAC-SHA256 algorithm.
* Package-private because it is used to generate SAS signatures.
*
* @param stringToSign The UTF-8-encoded string to sign.
* @return A {@code String} that contains the HMAC-SHA256-encoded signature.
* @throws RuntimeException If the HMAC-SHA256 algorithm isn't support, if the key isn't a valid Base64 encoded
* string, or the UTF-8 charset isn't supported.
*/
public String computeHmac256(final String stringToSign) {
return StorageImplUtils.computeHMac256(accountKey, stringToSign);
}
private String buildStringToSign(URL requestURL, String httpMethod, Map<String, String> headers) {
    // A Content-Length of 0 participates in the signature as ""; also guard the
    // null case (header absent), which previously threw a NullPointerException.
    String contentLength = headers.get("Content-Length");
    contentLength = (contentLength == null || contentLength.equals("0")) ? "" : contentLength;
    // When the request carries x-ms-date, the Date component must be empty.
    String dateHeader = (headers.containsKey("x-ms-date")) ? ""
        : getStandardHeaderValue(headers, "Date");
    // Components joined in the fixed order the Shared Key scheme expects;
    // absent standard headers contribute empty strings.
    return String.join("\n",
        httpMethod,
        getStandardHeaderValue(headers, "Content-Encoding"),
        getStandardHeaderValue(headers, "Content-Language"),
        contentLength,
        getStandardHeaderValue(headers, "Content-MD5"),
        getStandardHeaderValue(headers, "Content-Type"),
        dateHeader,
        getStandardHeaderValue(headers, "If-Modified-Since"),
        getStandardHeaderValue(headers, "If-Match"),
        getStandardHeaderValue(headers, "If-None-Match"),
        getStandardHeaderValue(headers, "If-Unmodified-Since"),
        getStandardHeaderValue(headers, "Range"),
        getAdditionalXmsHeaders(headers),
        getCanonicalizedResource(requestURL));
}
/*
* Returns an empty string if the header value is null or empty.
*/
private String getStandardHeaderValue(Map<String, String> headers, String headerName) {
final String headerValue = headers.get(headerName);
return headerValue == null ? "" : headerValue;
}
private String getCanonicalizedResource(URL requestURL) {
final StringBuilder canonicalizedResource = new StringBuilder("/");
canonicalizedResource.append(accountName);
if (requestURL.getPath().length() > 0) {
canonicalizedResource.append(requestURL.getPath());
} else {
canonicalizedResource.append('/');
}
if (requestURL.getQuery() == null) {
return canonicalizedResource.toString();
}
Map<String, String[]> queryParams = StorageImplUtils.parseQueryStringSplitValues(requestURL.getQuery());
ArrayList<String> queryParamNames = new ArrayList<>(queryParams.keySet());
Collections.sort(queryParamNames);
for (String queryParamName : queryParamNames) {
String[] queryParamValues = queryParams.get(queryParamName);
Arrays.sort(queryParamValues);
String queryParamValuesStr = String.join(",", queryParamValues);
canonicalizedResource.append("\n")
.append(queryParamName.toLowerCase(Locale.ROOT))
.append(":")
.append(queryParamValuesStr);
}
return canonicalizedResource.toString();
}
/**
* Searches for a {@link StorageSharedKeyCredential} in the passed {@link HttpPipeline}.
*
* @param httpPipeline Pipeline being searched
* @return a StorageSharedKeyCredential if the pipeline contains one, otherwise null.
*/
public static StorageSharedKeyCredential getSharedKeyCredentialFromPipeline(HttpPipeline httpPipeline) {
for (int i = 0; i < httpPipeline.getPolicyCount(); i++) {
HttpPipelinePolicy httpPipelinePolicy = httpPipeline.getPolicy(i);
if (httpPipelinePolicy instanceof StorageSharedKeyCredentialPolicy) {
StorageSharedKeyCredentialPolicy storageSharedKeyCredentialPolicy =
(StorageSharedKeyCredentialPolicy) httpPipelinePolicy;
return storageSharedKeyCredentialPolicy.sharedKeyCredential();
}
}
return null;
}
} | class StorageSharedKeyCredential {
private static final String AUTHORIZATION_HEADER_FORMAT = "SharedKey %s:%s";
private static final String ACCOUNT_NAME = "accountname";
private static final String ACCOUNT_KEY = "accountkey";
private final String accountName;
private final String accountKey;
/**
* Initializes a new instance of StorageSharedKeyCredential contains an account's name and its primary or secondary
* accountKey.
*
* @param accountName The account name associated with the request.
* @param accountKey The account access key used to authenticate the request.
*/
public StorageSharedKeyCredential(String accountName, String accountKey) {
Objects.requireNonNull(accountName, "'accountName' cannot be null.");
Objects.requireNonNull(accountKey, "'accountKey' cannot be null.");
this.accountName = accountName;
this.accountKey = accountKey;
}
/**
* Creates a SharedKey credential from the passed connection string.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.common.StorageSharedKeyCredential.fromConnectionString
*
* @param connectionString Connection string used to build the SharedKey credential.
* @return a SharedKey credential if the connection string contains AccountName and AccountKey
* @throws IllegalArgumentException If {@code connectionString} doesn't have AccountName or AccountKey.
*/
public static StorageSharedKeyCredential fromConnectionString(String connectionString) {
    HashMap<String, String> connectionStringPieces = new HashMap<>();
    for (String connectionStringPiece : connectionString.split(";")) {
        // Split on the first '=' only, so account keys containing '=' padding survive intact.
        String[] kvp = connectionStringPiece.split("=", 2);
        // Skip malformed segments (e.g. the empty piece produced by ";;"), which
        // previously threw ArrayIndexOutOfBoundsException on kvp[1].
        if (kvp.length != 2) {
            continue;
        }
        connectionStringPieces.put(kvp[0].toLowerCase(Locale.ROOT), kvp[1]);
    }
    String accountName = connectionStringPieces.get(ACCOUNT_NAME);
    String accountKey = connectionStringPieces.get(ACCOUNT_KEY);
    // Missing or empty credentials surface as a single, descriptive error.
    if (CoreUtils.isNullOrEmpty(accountName) || CoreUtils.isNullOrEmpty(accountKey)) {
        throw new IllegalArgumentException("Connection string must contain 'AccountName' and 'AccountKey'.");
    }
    return new StorageSharedKeyCredential(accountName, accountKey);
}
/**
* Gets the account name associated with the request.
*
* @return The account name.
*/
public String getAccountName() {
return accountName;
}
/**
* Generates the SharedKey Authorization value from information in the request.
* @param requestURL URL of the request
* @param httpMethod HTTP method being used
* @param headers Headers on the request
* @return the SharedKey authorization value
*/
public String generateAuthorizationHeader(URL requestURL, String httpMethod, Map<String, String> headers) {
String signature = StorageImplUtils.computeHMac256(accountKey,
buildStringToSign(requestURL, httpMethod, headers));
return String.format(AUTHORIZATION_HEADER_FORMAT, accountName, signature);
}
/**
* Computes a signature for the specified string using the HMAC-SHA256 algorithm.
* Package-private because it is used to generate SAS signatures.
*
* @param stringToSign The UTF-8-encoded string to sign.
* @return A {@code String} that contains the HMAC-SHA256-encoded signature.
* @throws RuntimeException If the HMAC-SHA256 algorithm isn't support, if the key isn't a valid Base64 encoded
* string, or the UTF-8 charset isn't supported.
*/
public String computeHmac256(final String stringToSign) {
return StorageImplUtils.computeHMac256(accountKey, stringToSign);
}
private String buildStringToSign(URL requestURL, String httpMethod, Map<String, String> headers) {
    // A Content-Length of 0 participates in the signature as ""; also guard the
    // null case (header absent), which previously threw a NullPointerException.
    String contentLength = headers.get("Content-Length");
    contentLength = (contentLength == null || contentLength.equals("0")) ? "" : contentLength;
    // When the request carries x-ms-date, the Date component must be empty.
    String dateHeader = (headers.containsKey("x-ms-date")) ? ""
        : getStandardHeaderValue(headers, "Date");
    // Components joined in the fixed order the Shared Key scheme expects;
    // absent standard headers contribute empty strings.
    return String.join("\n",
        httpMethod,
        getStandardHeaderValue(headers, "Content-Encoding"),
        getStandardHeaderValue(headers, "Content-Language"),
        contentLength,
        getStandardHeaderValue(headers, "Content-MD5"),
        getStandardHeaderValue(headers, "Content-Type"),
        dateHeader,
        getStandardHeaderValue(headers, "If-Modified-Since"),
        getStandardHeaderValue(headers, "If-Match"),
        getStandardHeaderValue(headers, "If-None-Match"),
        getStandardHeaderValue(headers, "If-Unmodified-Since"),
        getStandardHeaderValue(headers, "Range"),
        getAdditionalXmsHeaders(headers),
        getCanonicalizedResource(requestURL));
}
/*
* Returns an empty string if the header value is null or empty.
*/
private String getStandardHeaderValue(Map<String, String> headers, String headerName) {
final String headerValue = headers.get(headerName);
return headerValue == null ? "" : headerValue;
}
private String getCanonicalizedResource(URL requestURL) {
final StringBuilder canonicalizedResource = new StringBuilder("/");
canonicalizedResource.append(accountName);
if (requestURL.getPath().length() > 0) {
canonicalizedResource.append(requestURL.getPath());
} else {
canonicalizedResource.append('/');
}
if (requestURL.getQuery() == null) {
return canonicalizedResource.toString();
}
Map<String, String[]> queryParams = StorageImplUtils.parseQueryStringSplitValues(requestURL.getQuery());
ArrayList<String> queryParamNames = new ArrayList<>(queryParams.keySet());
Collections.sort(queryParamNames);
for (String queryParamName : queryParamNames) {
String[] queryParamValues = queryParams.get(queryParamName);
Arrays.sort(queryParamValues);
String queryParamValuesStr = String.join(",", queryParamValues);
canonicalizedResource.append("\n")
.append(queryParamName.toLowerCase(Locale.ROOT))
.append(":")
.append(queryParamValuesStr);
}
return canonicalizedResource.toString();
}
/**
 * Searches for a {@link StorageSharedKeyCredential} in the passed {@link HttpPipeline}.
 *
 * @param httpPipeline Pipeline being searched
 * @return a StorageSharedKeyCredential if the pipeline contains one, otherwise null.
 */
public static StorageSharedKeyCredential getSharedKeyCredentialFromPipeline(HttpPipeline httpPipeline) {
    final int policyCount = httpPipeline.getPolicyCount();
    for (int index = 0; index < policyCount; index++) {
        final HttpPipelinePolicy policy = httpPipeline.getPolicy(index);
        // Return the credential of the first shared-key policy encountered.
        if (policy instanceof StorageSharedKeyCredentialPolicy) {
            return ((StorageSharedKeyCredentialPolicy) policy).sharedKeyCredential();
        }
    }
    return null;
}
} |
What am i supposed to do here? | void commit() {
sink.complete();
lock.lock();
while (!complete) {
try {
transferComplete.await();
} catch (InterruptedException e) {
this.lastError = new IOException(e.getMessage());
}
}
lock.unlock();
} | this.lastError = new IOException(e.getMessage()); | void commit();
/**
 * Closes this output stream and releases any system resources associated with this stream. If any data remains in
 * the buffer it is committed to the service.
 *
 * @throws IOException If an I/O error occurs.
 */
@Override
public synchronized void close() throws IOException {
    try {
        // Fail fast if the stream was already closed or has a recorded error.
        this.checkStreamState();
        this.flush();
        try {
            this.commit();
        } catch (final BlobStorageException e) {
            // Translate service failures into the stream's IOException contract.
            throw new IOException(e);
        }
        /* Need this check because for block blob the buffered upload error only manifests itself after commit is
        called: commit records the failure in lastError instead of throwing. */
        if (this.lastError != null) {
            throw lastError;
        }
    } finally {
        // Poison the stream regardless of outcome so any further use fails
        // with a "stream closed" error.
        this.lastError = new IOException(Constants.STREAM_CLOSED);
    }
} | class BlobOutputStream extends StorageOutputStream {
// writeThreshold: maximum number of bytes buffered before a write is dispatched.
BlobOutputStream(final int writeThreshold) {
    super(writeThreshold);
}
// Creates an output stream that appends blocks to an existing append blob.
static BlobOutputStream appendBlobOutputStream(final AppendBlobAsyncClient client,
    final AppendBlobRequestConditions appendBlobRequestConditions) {
    return new AppendBlobOutputStream(client, appendBlobRequestConditions);
}
/**
 * Creates a block blob output stream from a BlobAsyncClient
 * @param client {@link BlobAsyncClient} The blob client.
 * @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
 * @param headers {@link BlobHttpHeaders} to associate with the blob.
 * @param metadata Metadata to associate with the blob.
 * @param tier {@link AccessTier} for the destination blob.
 * @param requestConditions {@link BlobRequestConditions} to apply to the upload.
 * @return {@link BlobOutputStream} associated with the blob.
 */
public static BlobOutputStream blockBlobOutputStream(final BlobAsyncClient client,
    final ParallelTransferOptions parallelTransferOptions, final BlobHttpHeaders headers,
    final Map<String, String> metadata, final AccessTier tier, final BlobRequestConditions requestConditions) {
    return new BlockBlobOutputStream(client, parallelTransferOptions, headers, metadata, tier, requestConditions);
}
// Creates an output stream that writes 512-byte-aligned pages into the given
// range of an existing page blob.
static BlobOutputStream pageBlobOutputStream(final PageBlobAsyncClient client, final PageRange pageRange,
    final BlobRequestConditions requestConditions) {
    return new PageBlobOutputStream(client, pageRange, requestConditions);
}
abstract
/*
 * Output stream over an append blob. Each dispatched write becomes one
 * appendBlock call; the tracked append position advances after every
 * successful block so subsequent appends target the right offset.
 */
private static final class AppendBlobOutputStream extends BlobOutputStream {
    private static final String INVALID_BLOCK_SIZE =
        "Block data should not exceed BlockBlobURL.MAX_STAGE_BLOCK_BYTES";
    // Conditions carrying the expected append position; mutated as blocks land.
    private final AppendBlobRequestConditions appendBlobRequestConditions;
    private final AppendBlobAsyncClient client;
    private AppendBlobOutputStream(final AppendBlobAsyncClient client,
        final AppendBlobRequestConditions appendBlobRequestConditions) {
        super(AppendBlobClient.MAX_APPEND_BLOCK_BYTES);
        this.client = client;
        this.appendBlobRequestConditions = (appendBlobRequestConditions == null)
            ? new AppendBlobRequestConditions() : appendBlobRequestConditions;
        // No starting position supplied: seed it with the blob's current size.
        // NOTE(review): this performs a blocking network call in the constructor.
        if (this.appendBlobRequestConditions.getAppendPosition() == null) {
            this.appendBlobRequestConditions.setAppendPosition(client.getProperties().block().getBlobSize());
        }
    }
    // Appends one block; on success advances the tracked append position.
    // IO/storage failures are recorded in lastError (surfaced by close())
    // rather than propagated down the reactive pipeline.
    private Mono<Void> appendBlock(Flux<ByteBuffer> blockData, long writeLength) {
        long newAppendOffset = appendBlobRequestConditions.getAppendPosition() + writeLength;
        return client.appendBlockWithResponse(blockData, writeLength, null, appendBlobRequestConditions)
            .doOnNext(ignored -> appendBlobRequestConditions.setAppendPosition(newAppendOffset))
            .then()
            .onErrorResume(t -> t instanceof IOException || t instanceof BlobStorageException, e -> {
                this.lastError = new IOException(e);
                return Mono.empty();
            });
    }
    @Override
    protected Mono<Void> dispatchWrite(byte[] data, int writeLength, long offset) {
        if (writeLength == 0) {
            return Mono.empty();
        }
        // Enforce the blob's configured max-size condition before sending.
        if (appendBlobRequestConditions.getMaxSize() != null
            && appendBlobRequestConditions.getAppendPosition() > appendBlobRequestConditions.getMaxSize()) {
            this.lastError = new IOException(INVALID_BLOCK_SIZE);
            return Mono.error(this.lastError);
        }
        // Defer wrapping the buffer until subscription time.
        Flux<ByteBuffer> fbb = Flux.range(0, 1).concatMap(pos -> Mono.fromCallable(() ->
            ByteBuffer.wrap(data, (int) offset, writeLength)));
        return this.appendBlock(fbb.subscribeOn(Schedulers.elastic()), writeLength);
    }
    @Override
    void commit() {
        // Append blobs are committed block-by-block; nothing to do on close.
    }
}
/*
 * Output stream over a block blob. Bytes written to the stream are pushed
 * into a FluxSink feeding a single buffered upload; commit() completes the
 * sink and blocks until the upload terminates.
 */
private static final class BlockBlobOutputStream extends BlobOutputStream {
    private FluxSink<ByteBuffer> sink;
    private final Lock lock;
    private final Condition transferComplete;
    // Guarded by 'lock'; set exactly once when the upload terminates.
    boolean complete;
    private BlockBlobOutputStream(final BlobAsyncClient client,
        final ParallelTransferOptions parallelTransferOptions, final BlobHttpHeaders headers,
        final Map<String, String> metadata, final AccessTier tier, final BlobRequestConditions requestConditions) {
        super(BlockBlobClient.MAX_STAGE_BLOCK_BYTES);
        this.lock = new ReentrantLock();
        this.transferComplete = lock.newCondition();
        Flux<ByteBuffer> fbb = Flux.create((FluxSink<ByteBuffer> sink) -> this.sink = sink);
        /* Subscribe by upload takes too long. We need to subscribe so that the sink is actually created. Since
        this subscriber doesn't do anything and no data has started flowing, there are no drawbacks to this extra
        subscribe. */
        fbb.subscribe();
        client.uploadWithResponse(fbb, parallelTransferOptions, headers, metadata, tier, requestConditions)
            .onErrorResume(BlobStorageException.class, e -> {
                // Record the failure for close()/commit() to surface.
                this.lastError = new IOException(e);
                return Mono.empty();
            })
            .doOnTerminate(() -> {
                lock.lock();
                try {
                    // Signal completion under the lock; finally guarantees the
                    // lock is released even if signalling throws.
                    complete = true;
                    transferComplete.signal();
                } finally {
                    lock.unlock();
                }
            })
            .subscribe();
    }
    @Override
    void commit() {
        lock.lock();
        try {
            // Complete the sink under the lock so the terminate signal cannot
            // slip in before this thread starts waiting (lock is reentrant,
            // so a same-thread terminate callback does not deadlock).
            sink.complete();
            while (!complete) {
                transferComplete.await();
            }
        } catch (InterruptedException e) {
            // Restore interrupt status and preserve the cause, not just the
            // message; close() will surface lastError.
            Thread.currentThread().interrupt();
            this.lastError = new IOException(e);
        } finally {
            lock.unlock();
        }
    }
    @Override
    protected void writeInternal(final byte[] data, int offset, int length) {
        // Push the chunk straight into the upload pipeline.
        sink.next(ByteBuffer.wrap(data, offset, length));
    }
    // Writes are dispatched through the sink, so there is nothing to do here.
    @Override
    protected Mono<Void> dispatchWrite(byte[] data, int writeLength, long offset) {
        return Mono.empty();
    }
}
/*
 * Output stream over a page blob. Writes must be 512-byte aligned and are
 * constrained to the page range supplied at construction; the range's start
 * advances as pages are written.
 */
private static final class PageBlobOutputStream extends BlobOutputStream {
    private static final String INVALID_NUMBER_OF_BYTES_IN_THE_BUFFER =
        "Page data must be a multiple of 512 bytes. Buffer currently contains %d bytes.";
    private final ClientLogger logger = new ClientLogger(PageBlobOutputStream.class);
    private final PageBlobAsyncClient client;
    private final PageBlobRequestConditions pageBlobRequestConditions;
    // Remaining writable range; 'start' is consumed by each dispatched write.
    private final PageRange pageRange;
    private PageBlobOutputStream(final PageBlobAsyncClient client, final PageRange pageRange,
        final BlobRequestConditions blobRequestConditions) {
        super(PageBlobClient.MAX_PUT_PAGES_BYTES);
        this.client = client;
        this.pageRange = pageRange;
        // Page blob APIs take PageBlobRequestConditions; copy the generic
        // blob conditions over field by field.
        if (blobRequestConditions != null) {
            this.pageBlobRequestConditions = new PageBlobRequestConditions()
                .setLeaseId(blobRequestConditions.getLeaseId())
                .setIfMatch(blobRequestConditions.getIfMatch())
                .setIfNoneMatch(blobRequestConditions.getIfNoneMatch())
                .setIfModifiedSince(blobRequestConditions.getIfModifiedSince())
                .setIfUnmodifiedSince(blobRequestConditions.getIfUnmodifiedSince());
        } else {
            this.pageBlobRequestConditions = null;
        }
    }
    // Uploads pages at the given absolute offset; storage failures are
    // recorded in lastError (surfaced by close()) instead of propagated.
    private Mono<Void> writePages(Flux<ByteBuffer> pageData, int length, long offset) {
        return client.uploadPagesWithResponse(new PageRange().setStart(offset).setEnd(offset + length - 1),
            pageData, null, pageBlobRequestConditions)
            .then()
            .onErrorResume(BlobStorageException.class, e -> {
                this.lastError = new IOException(e);
                return Mono.empty();
            });
    }
    @Override
    protected Mono<Void> dispatchWrite(byte[] data, int writeLength, long offset) {
        if (writeLength == 0) {
            return Mono.empty();
        }
        // Page writes must be whole pages (multiples of 512 bytes).
        if (writeLength % PageBlobAsyncClient.PAGE_BYTES != 0) {
            return Mono.error(new IOException(String.format(INVALID_NUMBER_OF_BYTES_IN_THE_BUFFER,
                writeLength)));
        }
        Flux<ByteBuffer> fbb = Flux.range(0, 1)
            .concatMap(pos -> Mono.fromCallable(() -> ByteBuffer.wrap(data, (int) offset, writeLength)));
        long pageOffset = pageRange.getStart();
        // Reject writes that would run past the end of the allowed range.
        if (pageOffset + writeLength - 1 > pageRange.getEnd()) {
            throw logger.logExceptionAsError(
                new RuntimeException("The input data length is larger than the page range."));
        }
        // Consume the written span of the range before dispatching.
        pageRange.setStart(pageRange.getStart() + writeLength);
        return this.writePages(fbb.subscribeOn(Schedulers.elastic()), writeLength, pageOffset);
    }
    @Override
    void commit() {
        // Pages are committed as they are uploaded; nothing to do on close.
    }
}
} | class BlobOutputStream extends StorageOutputStream {
BlobOutputStream(final int writeThreshold) {
super(writeThreshold);
}
static BlobOutputStream appendBlobOutputStream(final AppendBlobAsyncClient client,
final AppendBlobRequestConditions appendBlobRequestConditions) {
return new AppendBlobOutputStream(client, appendBlobRequestConditions);
}
/**
* Creates a block blob output stream from a BlobAsyncClient
* @param client {@link BlobAsyncClient} The blob client.
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the destination blob.
* @param requestConditions {@link BlobRequestConditions}
* @return {@link BlobOutputStream} associated with the blob.
*/
public static BlobOutputStream blockBlobOutputStream(final BlobAsyncClient client,
final ParallelTransferOptions parallelTransferOptions, final BlobHttpHeaders headers,
final Map<String, String> metadata, final AccessTier tier, final BlobRequestConditions requestConditions) {
return new BlockBlobOutputStream(client, parallelTransferOptions, headers, metadata, tier, requestConditions);
}
static BlobOutputStream pageBlobOutputStream(final PageBlobAsyncClient client, final PageRange pageRange,
final BlobRequestConditions requestConditions) {
return new PageBlobOutputStream(client, pageRange, requestConditions);
}
abstract
private static final class AppendBlobOutputStream extends BlobOutputStream {
private static final String INVALID_BLOCK_SIZE =
"Block data should not exceed BlockBlobURL.MAX_STAGE_BLOCK_BYTES";
private final AppendBlobRequestConditions appendBlobRequestConditions;
private final AppendBlobAsyncClient client;
private AppendBlobOutputStream(final AppendBlobAsyncClient client,
final AppendBlobRequestConditions appendBlobRequestConditions) {
super(AppendBlobClient.MAX_APPEND_BLOCK_BYTES);
this.client = client;
this.appendBlobRequestConditions = (appendBlobRequestConditions == null)
? new AppendBlobRequestConditions() : appendBlobRequestConditions;
if (this.appendBlobRequestConditions.getAppendPosition() == null) {
this.appendBlobRequestConditions.setAppendPosition(client.getProperties().block().getBlobSize());
}
}
private Mono<Void> appendBlock(Flux<ByteBuffer> blockData, long writeLength) {
long newAppendOffset = appendBlobRequestConditions.getAppendPosition() + writeLength;
return client.appendBlockWithResponse(blockData, writeLength, null, appendBlobRequestConditions)
.doOnNext(ignored -> appendBlobRequestConditions.setAppendPosition(newAppendOffset))
.then()
.onErrorResume(t -> t instanceof IOException || t instanceof BlobStorageException, e -> {
this.lastError = new IOException(e);
return Mono.empty();
});
}
@Override
protected Mono<Void> dispatchWrite(byte[] data, int writeLength, long offset) {
if (writeLength == 0) {
return Mono.empty();
}
if (appendBlobRequestConditions.getMaxSize() != null
&& appendBlobRequestConditions.getAppendPosition() > appendBlobRequestConditions.getMaxSize()) {
this.lastError = new IOException(INVALID_BLOCK_SIZE);
return Mono.error(this.lastError);
}
Flux<ByteBuffer> fbb = Flux.range(0, 1).concatMap(pos -> Mono.fromCallable(() ->
ByteBuffer.wrap(data, (int) offset, writeLength)));
return this.appendBlock(fbb.subscribeOn(Schedulers.elastic()), writeLength);
}
@Override
void commit() {
}
}
private static final class BlockBlobOutputStream extends BlobOutputStream {
private FluxSink<ByteBuffer> sink;
private final Lock lock;
private final Condition transferComplete;
boolean complete;
private BlockBlobOutputStream(final BlobAsyncClient client,
final ParallelTransferOptions parallelTransferOptions, final BlobHttpHeaders headers,
final Map<String, String> metadata, final AccessTier tier, final BlobRequestConditions requestConditions) {
super(BlockBlobClient.MAX_STAGE_BLOCK_BYTES);
this.lock = new ReentrantLock();
this.transferComplete = lock.newCondition();
Flux<ByteBuffer> fbb = Flux.create((FluxSink<ByteBuffer> sink) -> this.sink = sink);
/* Subscribe by upload takes too long. We need to subscribe so that the sink is actually created. Since
this subscriber doesn't do anything and no data has started flowing, there are no drawbacks to this extra
subscribe. */
fbb.subscribe();
client.uploadWithResponse(fbb, parallelTransferOptions, headers, metadata, tier, requestConditions)
.onErrorResume(BlobStorageException.class, e -> {
this.lastError = new IOException(e);
return Mono.empty();
})
.doOnTerminate(() -> {
lock.lock();
try {
complete = true;
transferComplete.signal();
} finally {
lock.unlock();
}
})
.subscribe();
}
@Override
void commit() {
    lock.lock();
    try {
        sink.complete(); /* Allow upload task to try to complete. */
        // Wait (tolerating spurious wakeups) until the upload's
        // doOnTerminate callback flips 'complete'.
        while (!complete) {
            transferComplete.await();
        }
    } catch (InterruptedException e) {
        // NOTE(review): the interrupt status is not restored here and the
        // cause is reduced to its message; the failure is recorded so
        // close() can surface it — confirm this is intentional.
        this.lastError = new IOException(e.getMessage());
    } finally {
        lock.unlock();
    }
}
@Override
protected void writeInternal(final byte[] data, int offset, int length) {
sink.next(ByteBuffer.wrap(data, offset, length));
}
@Override
protected Mono<Void> dispatchWrite(byte[] data, int writeLength, long offset) {
return Mono.empty();
}
}
private static final class PageBlobOutputStream extends BlobOutputStream {
private static final String INVALID_NUMBER_OF_BYTES_IN_THE_BUFFER =
"Page data must be a multiple of 512 bytes. Buffer currently contains %d bytes.";
private final ClientLogger logger = new ClientLogger(PageBlobOutputStream.class);
private final PageBlobAsyncClient client;
private final PageBlobRequestConditions pageBlobRequestConditions;
private final PageRange pageRange;
private PageBlobOutputStream(final PageBlobAsyncClient client, final PageRange pageRange,
final BlobRequestConditions blobRequestConditions) {
super(PageBlobClient.MAX_PUT_PAGES_BYTES);
this.client = client;
this.pageRange = pageRange;
if (blobRequestConditions != null) {
this.pageBlobRequestConditions = new PageBlobRequestConditions()
.setLeaseId(blobRequestConditions.getLeaseId())
.setIfMatch(blobRequestConditions.getIfMatch())
.setIfNoneMatch(blobRequestConditions.getIfNoneMatch())
.setIfModifiedSince(blobRequestConditions.getIfModifiedSince())
.setIfUnmodifiedSince(blobRequestConditions.getIfUnmodifiedSince());
} else {
this.pageBlobRequestConditions = null;
}
}
private Mono<Void> writePages(Flux<ByteBuffer> pageData, int length, long offset) {
return client.uploadPagesWithResponse(new PageRange().setStart(offset).setEnd(offset + length - 1),
pageData, null, pageBlobRequestConditions)
.then()
.onErrorResume(BlobStorageException.class, e -> {
this.lastError = new IOException(e);
return Mono.empty();
});
}
@Override
protected Mono<Void> dispatchWrite(byte[] data, int writeLength, long offset) {
if (writeLength == 0) {
return Mono.empty();
}
if (writeLength % PageBlobAsyncClient.PAGE_BYTES != 0) {
return Mono.error(new IOException(String.format(INVALID_NUMBER_OF_BYTES_IN_THE_BUFFER,
writeLength)));
}
Flux<ByteBuffer> fbb = Flux.range(0, 1)
.concatMap(pos -> Mono.fromCallable(() -> ByteBuffer.wrap(data, (int) offset, writeLength)));
long pageOffset = pageRange.getStart();
if (pageOffset + writeLength - 1 > pageRange.getEnd()) {
throw logger.logExceptionAsError(
new RuntimeException("The input data length is larger than the page range."));
}
pageRange.setStart(pageRange.getStart() + writeLength);
return this.writePages(fbb.subscribeOn(Schedulers.elastic()), writeLength, pageOffset);
}
@Override
void commit() {
}
}
} |
/**
 * Creates a {@link LocalCryptographyAsyncClient} based on options set in the builder. Every time
 * {@code buildAsyncClient()} is called, a new instance of {@link LocalCryptographyAsyncClient} is created.
 *
 * @return A new {@link LocalCryptographyAsyncClient} backed by the configured key.
 * @throws NullPointerException if no {@code JsonWebKey} has been set on the builder.
 */
public LocalCryptographyAsyncClient buildAsyncClient() {
    // A missing key is a violated non-null precondition; per Java guidelines
    // that is a NullPointerException, not IllegalStateException.
    if (jsonWebKey == null) {
        throw logger.logExceptionAsError(new NullPointerException(
            "Json Web Key is required to create local cryptography client"));
    }
    return new LocalCryptographyAsyncClient(jsonWebKey);
} | } | public LocalCryptographyAsyncClient buildAsyncClient() {
if (jsonWebKey == null) {
throw logger.logExceptionAsError(new NullPointerException(
"Json Web Key is required to create local cryptography client"));
}
return new LocalCryptographyAsyncClient(jsonWebKey);
} | class LocalCryptographyClientBuilder {
private final ClientLogger logger = new ClientLogger(LocalCryptographyClientBuilder.class);
private JsonWebKey jsonWebKey;
/**
* Creates a {@link LocalCryptographyClient} based on options set in the builder.
* Every time {@code buildClient()} is called, a new instance of {@link LocalCryptographyClient} is created.
*
* <p> The LocalCryptographyClientBuilder | class LocalCryptographyClientBuilder {
private final ClientLogger logger = new ClientLogger(LocalCryptographyClientBuilder.class);
private JsonWebKey jsonWebKey;
/**
* Creates a {@link LocalCryptographyClient} based on options set in the builder.
* Every time {@code buildClient()} is called, a new instance of {@link LocalCryptographyClient} is created.
*
* <p> The LocalCryptographyClientBuilder |
/**
 * Asks the Service Bus service to renew the lock held on the message, extending the time the message
 * remains locked to this receiver.
 *
 * @param lockToken Lock token of the message whose lock should be renewed.
 * @return The new {@link Instant} at which the lock expires; if the token is a
 *     {@link ServiceBusReceivedMessage}, its locked-until property is updated as well.
 * @throws NullPointerException if {@code lockToken} or its lock token string is null.
 * @throws IllegalArgumentException if the lock token string is empty.
 * @throws IllegalStateException if the receiver is disposed or the token is not a valid UUID.
 */
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'lockToken' cannot be null."));
    } else if (Objects.isNull(lockToken.getLockToken())) {
        // Null and empty are distinct failures: a null token is a programming
        // error (NPE), an empty one is an invalid argument.
        return monoError(logger, new NullPointerException("'lockToken.getLockToken()' cannot be null."));
    } else if (lockToken.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'lockToken.getLockToken()' cannot be empty."));
    }
    final UUID lockTokenUuid;
    try {
        lockTokenUuid = UUID.fromString(lockToken.getLockToken());
    } catch (IllegalArgumentException ex) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_LOCK_TOKEN_STRING, lockToken.getLockToken())));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(serviceBusManagementNode ->
            serviceBusManagementNode.renewMessageLock(lockTokenUuid))
        .map(instant -> {
            // Keep the received message's view of its lock expiry in sync.
            if (lockToken instanceof ServiceBusReceivedMessage) {
                ((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
            }
            return instant;
        });
} | } else if (CoreUtils.isNullOrEmpty(lockToken.getLockToken())) { | public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(lockToken.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (lockToken.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
UUID lockTokenUuid = null;
try {
lockTokenUuid = UUID.fromString(lockToken.getLockToken());
} catch (IllegalArgumentException ex) {
monoError(logger, ex);
}
UUID finalLockTokenUuid = lockTokenUuid;
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(finalLockTokenUuid))
.map(instant -> {
if (lockToken instanceof ServiceBusReceivedMessage) {
((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
}
return instant;
});
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
// Flipped once on close; all public operations check it and fail fast afterwards.
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
// Cached from receiveOptions for convenience.
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
// Invoked when this client closes so the owner can release shared resources.
private final Runnable onClientClose;
/**
 * Map containing linkNames and their associated consumers.
 * Key: linkName
 * Value: consumer associated with that linkName.
 */
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiveOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client completes.
 * @throws NullPointerException if any required argument is null.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
    ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
    MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveMessageOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    this.prefetch = receiveOptions.getPrefetchCount();
    this.receiveMode = receiveOptions.getReceiveMode();
    this.entityType = entityType;
    this.isSessionEnabled = isSessionEnabled;
    this.messageLockContainer = messageLockContainer;
    this.onClientClose = onClientClose;
}
/**
 * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
 * {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace that the connection is associated with.
 */
public String getFullyQualifiedNamespace() {
    return fullyQualifiedNamespace;
}
/**
 * Gets the Service Bus resource this client interacts with.
 *
 * @return The Service Bus resource this client interacts with.
 */
public String getEntityPath() {
    return entityPath;
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 * mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    // Delegates to the overload with no properties to modify.
    return abandon(lockToken, null);
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
 * This will make the message available again for processing. Abandoning a message will increase the delivery count
 * on the message.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 * mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 * mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
    return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
 * subqueue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 * mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see Azure Service Bus message deferral documentation.
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    // Delegates to the overload with no properties to modify.
    return defer(lockToken, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
 * move message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 * mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see Azure Service Bus message deferral documentation.
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 * mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see Azure Service Bus dead-letter queue documentation.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    // Uses default (empty) dead-letter options.
    return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 * mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (Objects.isNull(deadLetterOptions)) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
        deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call to
 * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
 * message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see Azure Service Bus message browsing documentation.
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }
    // Peeking is a management-link operation, not a receive-link one.
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(ServiceBusManagementNode::peek);
}
/**
 * Starting from the given sequence number, reads next the active message without changing the state of the receiver
 * or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see Azure Service Bus message browsing documentation.
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see Azure Service Bus message browsing documentation.
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see Azure Service Bus message browsing documentation.
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
* Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
*
* @return A stream of messages from the Service Bus entity.
*/
public Flux<ServiceBusReceivedMessage> receive() {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
if (receiveMode != ReceiveMode.PEEK_LOCK && receiveOptions.isAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
* Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
* on the entity. When a message is received in {@link ReceiveMode
* server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
* processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
* lock is reset to the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
/**
* Disposes of the consumer by closing the underlying connection to the service.
*/
@Override
public void close() {
if (isDisposed.getAndSet(true)) {
return;
}
logger.info("Removing receiver links.");
openConsumers.keySet().forEach(key -> {
final ServiceBusAsyncConsumer consumer = openConsumers.get(key);
if (consumer != null) {
consumer.close();
}
});
openConsumers.clear();
onClientClose.run();
}
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
final Instant lockedUntilUtc = messageLockContainer.getLockTokenExpiration(lockToken);
if (lockedUntilUtc == null) {
logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
return Mono.just(false);
}
final Instant now = Instant.now();
if (lockedUntilUtc.isBefore(now)) {
return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
"Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", lockedUntilUtc, now),
getErrorContext())));
}
return Mono.just(true);
}
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
} else if (Objects.isNull(message)) {
return monoError(logger, new NullPointerException("'message' cannot be null."));
}
if (receiveMode != ReceiveMode.PEEK_LOCK) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
} else if (CoreUtils.isNullOrEmpty(message.getLockToken())) {
return Mono.error(logger.logExceptionAsError(new IllegalArgumentException(
"'message.lockToken' cannot be null.")));
}
final UUID lockToken = UUID.fromString(message.getLockToken());
final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
entityPath, dispositionStatus, lockToken, instant);
return isLockTokenValid(lockToken).flatMap(isLocked -> {
return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> {
if (isLocked) {
return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
deadLetterErrorDescription, propertiesToModify);
} else {
return Mono.error(new UnsupportedOperationException(
"Cannot complete a message that is not locked. lockToken: " + lockToken));
}
});
}).then(Mono.fromRunnable(() -> {
logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
messageLockContainer.remove(lockToken);
}));
}
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
.repeat();
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
private AmqpErrorContext getErrorContext() {
return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
private final Runnable onClientClose;
/**
* Map containing linkNames and their associated consumers.
* Key: linkName
* Value: consumer associated with that linkName.
*/
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
* Creates a receiver that listens to a Service Bus resource.
* @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
* @param entityPath The name of the topic or queue.
* @param entityType The type of the Service Bus resource.
* @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
* @param receiveOptions Options when receiving messages.
* @param connectionProcessor The AMQP connection to the Service Bus resource.
* @param tracerProvider Tracer for telemetry.
* @param messageSerializer Serializes and deserializes Service Bus messages.
* @param messageLockContainer Container for message locks.
* @param onClientClose Operation to run when the client completes.
*/
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.prefetch = receiveOptions.getPrefetchCount();
this.receiveMode = receiveOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return entityPath;
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken) {
return abandon(lockToken, null);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> complete(MessageLockToken lockToken) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken) {
return defer(lockToken, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(MessageLockToken lockToken) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
if (Objects.isNull(deadLetterOptions)) {
return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peek() {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(ServiceBusManagementNode::peek);
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.peek(sequenceNumber));
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
* Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
*
* @return A stream of messages from the Service Bus entity.
*/
public Flux<ServiceBusReceivedMessage> receive() {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
if (receiveMode != ReceiveMode.PEEK_LOCK && receiveOptions.isAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
* Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
* on the entity. When a message is received in {@link ReceiveMode
* server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
* processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
* lock is reset to the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
/**
* Disposes of the consumer by closing the underlying connection to the service.
*/
@Override
public void close() {
if (isDisposed.getAndSet(true)) {
return;
}
logger.info("Removing receiver links.");
openConsumers.keySet().forEach(key -> {
final ServiceBusAsyncConsumer consumer = openConsumers.get(key);
if (consumer != null) {
consumer.close();
}
});
openConsumers.clear();
onClientClose.run();
}
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
final Instant lockedUntilUtc = messageLockContainer.getLockTokenExpiration(lockToken);
if (lockedUntilUtc == null) {
logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
return Mono.just(false);
}
final Instant now = Instant.now();
if (lockedUntilUtc.isBefore(now)) {
return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
"Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", lockedUntilUtc, now),
getErrorContext())));
}
return Mono.just(true);
}
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
} else if (Objects.isNull(message)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(message.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (message.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
if (receiveMode != ReceiveMode.PEEK_LOCK) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
} else if (Objects.isNull(message.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (message.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
final UUID lockToken = UUID.fromString(message.getLockToken());
final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
entityPath, dispositionStatus, lockToken, instant);
return isLockTokenValid(lockToken).flatMap(isLocked -> {
return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> {
if (isLocked) {
return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
deadLetterErrorDescription, propertiesToModify);
} else {
return Mono.error(new UnsupportedOperationException(
"Cannot complete a message that is not locked. lockToken: " + lockToken));
}
});
}).then(Mono.fromRunnable(() -> {
logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
messageLockContainer.remove(lockToken);
}));
}
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
.repeat();
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
private AmqpErrorContext getErrorContext() {
return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} |
shouldn't we propagate the same error back? It is an illegalargument exception because it couldn't be converted to a UUID. It's not dependent on the state of this receiver. | public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (CoreUtils.isNullOrEmpty(lockToken.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
}
UUID lockTokenUUID;
try {
lockTokenUUID = UUID.fromString(lockToken.getLockToken());
} catch (IllegalArgumentException ex) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_LOCK_TOKEN_STRING, lockToken.getLockToken())));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(lockTokenUUID))
.map(instant -> {
if (lockToken instanceof ServiceBusReceivedMessage) {
((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
}
return instant;
});
} | return monoError(logger, new IllegalStateException( | public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(lockToken.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (lockToken.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
UUID lockTokenUuid = null;
try {
lockTokenUuid = UUID.fromString(lockToken.getLockToken());
} catch (IllegalArgumentException ex) {
monoError(logger, ex);
}
UUID finalLockTokenUuid = lockTokenUuid;
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(finalLockTokenUuid))
.map(instant -> {
if (lockToken instanceof ServiceBusReceivedMessage) {
((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
}
return instant;
});
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Reused for deadLetter(MessageLockToken) so a fresh options instance is not allocated per call.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
// Flipped to true exactly once in close(); public operations check it before doing work.
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
// Identity of the Service Bus entity this receiver is bound to.
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
// Receive configuration; prefetch and receiveMode below are cached from it in the constructor.
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
// Tracks lock tokens and their expirations for messages received in PEEK_LOCK mode.
private final MessageLockContainer messageLockContainer;
// Callback run from close() so the owning client/builder can release shared resources.
private final Runnable onClientClose;
/**
 * Map containing linkNames and their associated consumers.
 * Key: linkName
 * Value: consumer associated with that linkName.
 */
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiveOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client completes.
 * @throws NullPointerException if any required dependency is {@code null}.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
    ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
    MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    // Message previously referenced a non-existent 'receiveMessageOptions' parameter.
    this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    // Fail fast: these were previously unchecked and would only NPE much later
    // (e.g. onClientClose.run() inside close()).
    this.messageLockContainer = Objects.requireNonNull(messageLockContainer,
        "'messageLockContainer' cannot be null.");
    this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null.");
    this.prefetch = receiveOptions.getPrefetchCount();
    this.receiveMode = receiveOptions.getReceiveMode();
    this.entityType = entityType;
    this.isSessionEnabled = isSessionEnabled;
}
/**
 * Gets the fully qualified Service Bus namespace the connection is associated with, similar to
 * {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace.
 */
public String getFullyQualifiedNamespace() {
    return this.fullyQualifiedNamespace;
}
/**
 * Gets the path of the Service Bus entity (queue or topic) this client interacts with.
 *
 * @return The Service Bus entity path.
 */
public String getEntityPath() {
    return this.entityPath;
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} using its lock token, making the message
 * available again for processing. Abandoning increments the message's delivery count.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes; it errors with
 *     {@link NullPointerException} for a null token, {@link UnsupportedOperationException} when the
 *     receiver is not in PEEK_LOCK mode, or {@link IllegalArgumentException} for an empty token.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    // Same as abandon(lockToken, null): no message properties are modified.
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, null);
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} using its lock token and updates the
 * message's properties. The message becomes available again for processing and its delivery
 * count is incremented.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    final DispositionStatus status = DispositionStatus.ABANDONED;
    return updateDisposition(lockToken, status, null, null, propertiesToModify);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token, removing the
 * message from the service.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
    final DispositionStatus status = DispositionStatus.COMPLETED;
    return updateDisposition(lockToken, status, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token, moving it to the
 * deferred subqueue. Deferred messages are later retrieved by sequence number.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    // Same as defer(lockToken, null): no message properties are modified.
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token while modifying message
 * properties. The message moves into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    final DispositionStatus status = DispositionStatus.DEFERRED;
    return updateDisposition(lockToken, status, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the dead-letter sub-queue using default
 * dead-letter options.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    final DeadLetterOptions options = DEFAULT_DEAD_LETTER_OPTIONS;
    return deadLetter(lockToken, options);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the dead-letter subqueue with a
 * dead-letter reason, error description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions Options describing how to dead-letter the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes; errors with
 *     {@link NullPointerException} when {@code deadLetterOptions} is null.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (deadLetterOptions == null) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED,
        deadLetterOptions.getDeadLetterReason(),
        deadLetterOptions.getDeadLetterErrorDescription(),
        deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message
 * source. The first call fetches the first active message; each subsequent call fetches the
 * next one.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        final String message = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek");
        return monoError(logger, new IllegalStateException(message));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.peek());
}
/**
 * Starting from the given sequence number, reads the next active message without changing the
 * state of the receiver or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 * @return A peeked {@link ServiceBusReceivedMessage}.
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        final String message = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt");
        return monoError(logger, new IllegalStateException(message));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the
 * message source.
 *
 * @param maxMessages The maximum number of messages to peek.
 * @return A {@link Flux} of peeked {@link ServiceBusReceivedMessage messages}.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer (emitted
 *     through the returned {@link Flux}).
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }
    // Enforce the contract the javadoc already documented instead of relying on the service
    // to reject a non-positive batch size.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without
 * changing the state of the receiver or the message source.
 *
 * @param maxMessages The maximum number of messages to peek.
 * @param sequenceNumber The sequence number from where to start reading messages.
 * @return A {@link Flux} of peeked {@link ServiceBusReceivedMessage messages}.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer (emitted
 *     through the returned {@link Flux}).
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }
    // Enforce the contract the javadoc already documented instead of relying on the service
    // to reject a non-positive batch size.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
 * The underlying AMQP consumer is created lazily on subscription and disposed when the stream
 * terminates.
 *
 * @return A stream of messages from the Service Bus entity.
 */
public Flux<ServiceBusReceivedMessage> receive() {
// Reject new work once the client has been closed.
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
// Auto-complete requires PEEK_LOCK; in RECEIVE_AND_DELETE there is nothing left to settle.
if (receiveMode != ReceiveMode.PEEK_LOCK && receiveOptions.isAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
// usingWhen ties the consumer's lifetime to the subscription: created on subscribe, and on
// termination it is removed from the open-consumer map and closed.
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
// remove() may return null if close() already cleared the map concurrently.
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
}
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be
 * received by using their sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 * @return A deferred message with the matching {@code sequenceNumber}.
 */
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
    // Disposed-receiver guard added for consistency with every other receive/peek operation
    // (e.g. receiveDeferredMessageBatch), which all reject work after close().
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages} by their sequence
 * numbers. Deferred messages can only be received this way.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        final String message = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch");
        return fluxError(logger, new IllegalStateException(message));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
* Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
* on the entity. When a message is received in {@link ReceiveMode
* server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
* processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
* lock is reset to the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
/**
 * Disposes of the consumer by closing the underlying connection to the service. Idempotent:
 * only the first call performs the teardown.
 */
@Override
public void close() {
    if (isDisposed.getAndSet(true)) {
        return;
    }
    logger.info("Removing receiver links.");
    // Iterate the values directly instead of keySet() + get(key): avoids a redundant lookup
    // per key and the null-check needed when an entry is removed between the two calls.
    for (ServiceBusAsyncConsumer consumer : openConsumers.values()) {
        consumer.close();
    }
    openConsumers.clear();
    // Let the owning client release resources shared with this receiver.
    onClientClose.run();
}
/**
 * Checks whether this receiver still holds a valid (unexpired) lock for the given token.
 *
 * @param lockToken Lock token to check.
 * @return A {@link Mono} emitting {@code true} when the lock is held and unexpired,
 *     {@code false} when the token is unknown to this receiver; errors with
 *     {@link AmqpException} when the lock has already expired.
 */
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
    final Instant expiration = messageLockContainer.getLockTokenExpiration(lockToken);
    if (expiration == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }
    final Instant now = Instant.now();
    if (!expiration.isBefore(now)) {
        return Mono.just(true);
    }
    final String message = String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", expiration, now);
    return Mono.error(logger.logExceptionAsError(new AmqpException(false, message, getErrorContext())));
}
/**
 * Updates the disposition (settlement state) of a message identified by its lock token.
 *
 * @param message Lock token of the message to settle.
 * @param dispositionStatus Target state: completed, abandoned, deferred or suspended (dead-letter).
 * @param deadLetterReason Optional reason when dead-lettering.
 * @param deadLetterErrorDescription Optional error description when dead-lettering.
 * @param propertiesToModify Optional message properties to modify.
 * @return A {@link Mono} that completes when the service acknowledges the disposition change.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'message' cannot be null."));
    }
    // Settlement requires the message to be locked, which only happens in PEEK_LOCK mode.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    } else if (CoreUtils.isNullOrEmpty(message.getLockToken())) {
        return Mono.error(logger.logExceptionAsError(new IllegalArgumentException(
            "'message.lockToken' cannot be null.")));
    }
    final UUID lockToken;
    try {
        lockToken = UUID.fromString(message.getLockToken());
    } catch (IllegalArgumentException ex) {
        // Previously a malformed token threw synchronously out of a Mono-returning method.
        // Surface it through the returned Mono instead, matching renewMessageLock's handling.
        return monoError(logger, ex);
    }
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);
    return isLockTokenValid(lockToken)
        .flatMap(isLocked -> connectionProcessor
            .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            }))
        .then(Mono.fromRunnable(() -> {
            logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
            // The lock is consumed by settlement; stop tracking it.
            messageLockContainer.remove(lockToken);
        }));
}
/**
 * Returns the consumer registered under {@code linkName}, creating (and caching) one on first
 * use. computeIfAbsent guarantees at most one consumer is created per link name even under
 * concurrent callers.
 */
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
// repeat() recreates the receive link whenever the previous one completes, so the
// consumer keeps receiving across link reconnects.
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
.repeat();
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
// The link processor handles prefetch/credit and retry across the stream of links.
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));
// The consumer settles messages through this client's own complete/abandon/renew methods.
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
/**
 * Builds the AMQP error context (namespace + entity path) attached to errors raised by this
 * receiver.
 */
private AmqpErrorContext getErrorContext() {
    final String namespace = getFullyQualifiedNamespace();
    final String path = getEntityPath();
    return new SessionErrorContext(namespace, path);
}
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Reused for deadLetter(MessageLockToken) so a fresh options instance is not allocated per call.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
// Flipped to true exactly once in close(); public operations check it before doing work.
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
// Identity of the Service Bus entity this receiver is bound to.
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
// Receive configuration; prefetch and receiveMode below are cached from it in the constructor.
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
// Tracks lock tokens and their expirations for messages received in PEEK_LOCK mode.
private final MessageLockContainer messageLockContainer;
// Callback run from close() so the owning client/builder can release shared resources.
private final Runnable onClientClose;
/**
 * Map containing linkNames and their associated consumers.
 * Key: linkName
 * Value: consumer associated with that linkName.
 */
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiveOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client completes.
 * @throws NullPointerException if any required dependency is {@code null}.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
    ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
    MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    // Message previously referenced a non-existent 'receiveMessageOptions' parameter.
    this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    // Fail fast: these were previously unchecked and would only NPE much later
    // (e.g. onClientClose.run() inside close()).
    this.messageLockContainer = Objects.requireNonNull(messageLockContainer,
        "'messageLockContainer' cannot be null.");
    this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null.");
    this.prefetch = receiveOptions.getPrefetchCount();
    this.receiveMode = receiveOptions.getReceiveMode();
    this.entityType = entityType;
    this.isSessionEnabled = isSessionEnabled;
}
/**
 * Gets the fully qualified Service Bus namespace the connection is associated with, similar to
 * {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace.
 */
public String getFullyQualifiedNamespace() {
    return this.fullyQualifiedNamespace;
}
/**
 * Gets the path of the Service Bus entity (queue or topic) this client interacts with.
 *
 * @return The Service Bus entity path.
 */
public String getEntityPath() {
    return this.entityPath;
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} using its lock token, making the message
 * available again for processing. Abandoning increments the message's delivery count.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    // Same as abandon(lockToken, null): no message properties are modified.
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, null);
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} using its lock token and updates the
 * message's properties. The message becomes available again for processing and its delivery
 * count is incremented.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    final DispositionStatus status = DispositionStatus.ABANDONED;
    return updateDisposition(lockToken, status, null, null, propertiesToModify);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token, removing the
 * message from the service.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
    final DispositionStatus status = DispositionStatus.COMPLETED;
    return updateDisposition(lockToken, status, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token, moving it to the
 * deferred subqueue. Deferred messages are later retrieved by sequence number.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    // Same as defer(lockToken, null): no message properties are modified.
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token while modifying message
 * properties. The message moves into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    final DispositionStatus status = DispositionStatus.DEFERRED;
    return updateDisposition(lockToken, status, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the dead-letter sub-queue using default
 * dead-letter options.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    final DeadLetterOptions options = DEFAULT_DEAD_LETTER_OPTIONS;
    return deadLetter(lockToken, options);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the dead-letter subqueue with a
 * dead-letter reason, error description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions Options describing how to dead-letter the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes; errors with
 *     {@link NullPointerException} when {@code deadLetterOptions} is null.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (deadLetterOptions == null) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED,
        deadLetterOptions.getDeadLetterReason(),
        deadLetterOptions.getDeadLetterErrorDescription(),
        deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message
 * source. The first call fetches the first active message; each subsequent call fetches the
 * next one.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        final String message = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek");
        return monoError(logger, new IllegalStateException(message));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.peek());
}
/**
 * Starting from the given sequence number, reads the next active message without changing the
 * state of the receiver or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 * @return A peeked {@link ServiceBusReceivedMessage}.
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        final String message = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt");
        return monoError(logger, new IllegalStateException(message));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the
 * message source.
 *
 * @param maxMessages The maximum number of messages to peek.
 * @return A {@link Flux} of peeked {@link ServiceBusReceivedMessage messages}.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer (emitted
 *     through the returned {@link Flux}).
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }
    // Enforce the contract the javadoc already documented instead of relying on the service
    // to reject a non-positive batch size.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without
 * changing the state of the receiver or the message source.
 *
 * @param maxMessages The maximum number of messages to peek.
 * @param sequenceNumber The sequence number from where to start reading messages.
 * @return A {@link Flux} of peeked {@link ServiceBusReceivedMessage messages}.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer (emitted
 *     through the returned {@link Flux}).
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }
    // Enforce the contract the javadoc already documented instead of relying on the service
    // to reject a non-positive batch size.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
 * The underlying AMQP consumer is created lazily on subscription and disposed when the stream
 * terminates.
 *
 * @return A stream of messages from the Service Bus entity.
 */
public Flux<ServiceBusReceivedMessage> receive() {
// Reject new work once the client has been closed.
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
// Auto-complete requires PEEK_LOCK; in RECEIVE_AND_DELETE there is nothing left to settle.
if (receiveMode != ReceiveMode.PEEK_LOCK && receiveOptions.isAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
// usingWhen ties the consumer's lifetime to the subscription: created on subscribe, and on
// termination it is removed from the open-consumer map and closed.
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
// remove() may return null if close() already cleared the map concurrently.
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
}
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be
 * received by using their sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 * @return A deferred message with the matching {@code sequenceNumber}.
 */
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
    // Disposed-receiver guard added for consistency with every other receive/peek operation
    // (e.g. receiveDeferredMessageBatch), which all reject work after close().
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages} by their sequence
 * numbers. Deferred messages can only be received this way.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        final String message = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch");
        return fluxError(logger, new IllegalStateException(message));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
 * on the entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message is locked on the
 * server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
 * processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
 * lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is an empty value.
 */
/**
 * Disposes of the consumer by closing the underlying connection to the service.
 */
@Override
public void close() {
    // getAndSet makes close() idempotent; only the first caller performs the teardown.
    if (isDisposed.getAndSet(true)) {
        return;
    }

    logger.info("Removing receiver links.");
    for (ServiceBusAsyncConsumer consumer : openConsumers.values()) {
        if (consumer != null) {
            consumer.close();
        }
    }
    openConsumers.clear();

    onClientClose.run();
}
/**
 * Checks whether this receiver still owns an unexpired lock for {@code lockToken}.
 * Emits {@code false} when the token is unknown, and an error when the lock has already expired.
 */
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
    final Instant expiration = messageLockContainer.getLockTokenExpiration(lockToken);
    if (expiration == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }

    final Instant now = Instant.now();
    if (now.isAfter(expiration)) {
        return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
            "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", expiration, now),
            getErrorContext())));
    }

    return Mono.just(true);
}
/**
 * Moves a locked message to the given terminal state (complete, abandon, defer, dead-letter) on the service.
 *
 * @param message Token identifying the locked message to settle.
 * @param dispositionStatus Terminal state to move the message to.
 * @param deadLetterReason Reason recorded when dead-lettering; {@code null} otherwise.
 * @param deadLetterErrorDescription Error description recorded when dead-lettering; {@code null} otherwise.
 * @param propertiesToModify Message properties to modify; may be {@code null}.
 *
 * @return A {@link Mono} that completes when the disposition has been applied and the local lock removed.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    // Settling is only possible while holding a lock, i.e. in PEEK_LOCK mode. (The repeated
    // null/empty lock-token checks that used to follow were unreachable duplicates and are removed.)
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }

    final UUID lockToken = UUID.fromString(message.getLockToken());
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);

    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        messageLockContainer.remove(lockToken);
    }));
}
/**
 * Gets the consumer cached for {@code linkName}, creating and caching a new one on first use.
 * Creation wires a self-repairing AMQP receive-link Flux into a link processor and hands the
 * settlement callbacks (complete/abandon/renew) of this client to the consumer.
 */
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
    return openConsumers.computeIfAbsent(linkName, name -> {
        logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
        final Flux<AmqpReceiveLink> receiveLink =
            connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
                receiveMode, isSessionEnabled, null, entityType))
                .doOnNext(next -> {
                    final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
                        + " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
                    logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
                        entityType);
                })
                // repeat() re-subscribes after the upstream completes so a fresh link is requested —
                // NOTE(review): presumably to recover after a link closes; confirm against connection behavior.
                .repeat();
        final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
        final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
            new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));
        // The consumer settles messages through this client so lock bookkeeping stays centralized.
        return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
            receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
            receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(), messageLockContainer,
            this::complete, this::abandon, this::renewMessageLock);
    });
}
// Builds the AMQP error context (namespace + entity path) attached to errors raised by this receiver.
private AmqpErrorContext getErrorContext() {
    return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} |
You'll get a warning to use logger.logErrorAsException then throw. | public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(lockToken.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (lockToken.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
UUID lockTokenUUID;
try {
lockTokenUUID = UUID.fromString(lockToken.getLockToken());
} catch (IllegalArgumentException ex) {
throw ex;
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(lockTokenUUID))
.map(instant -> {
if (lockToken instanceof ServiceBusReceivedMessage) {
((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
}
return instant;
});
} | throw ex; | public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(lockToken.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (lockToken.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
UUID lockTokenUuid = null;
try {
lockTokenUuid = UUID.fromString(lockToken.getLockToken());
} catch (IllegalArgumentException ex) {
monoError(logger, ex);
}
UUID finalLockTokenUuid = lockTokenUuid;
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(finalLockTokenUuid))
.map(instant -> {
if (lockToken instanceof ServiceBusReceivedMessage) {
((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
}
return instant;
});
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
private final Runnable onClientClose;
/**
* Map containing linkNames and their associated consumers.
* Key: linkName
* Value: consumer associated with that linkName.
*/
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
* Creates a receiver that listens to a Service Bus resource.
* @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
* @param entityPath The name of the topic or queue.
* @param entityType The type of the Service Bus resource.
* @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
* @param receiveOptions Options when receiving messages.
* @param connectionProcessor The AMQP connection to the Service Bus resource.
* @param tracerProvider Tracer for telemetry.
* @param messageSerializer Serializes and deserializes Service Bus messages.
* @param messageLockContainer Container for message locks.
* @param onClientClose Operation to run when the client completes.
*/
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.prefetch = receiveOptions.getPrefetchCount();
this.receiveMode = receiveOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return entityPath;
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken) {
return abandon(lockToken, null);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> complete(MessageLockToken lockToken) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken) {
return defer(lockToken, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(MessageLockToken lockToken) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    // Options are required here; the single-argument overload supplies the defaults.
    if (deadLetterOptions == null) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }

    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
        deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peek() {
    // Peeking does not lock or remove the message; it only reads the next active one.
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.peek());
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.peek(sequenceNumber));
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
* Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
*
* @return A stream of messages from the Service Bus entity.
*/
public Flux<ServiceBusReceivedMessage> receive() {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
    }
    // Auto-complete needs message settlement, which only PEEK_LOCK allows.
    if (receiveOptions.isAutoComplete() && receiveMode != ReceiveMode.PEEK_LOCK) {
        return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
            "Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
    }

    // Bind the consumer's lifetime to the subscription; tear it down when the stream terminates.
    return Flux.usingWhen(
        Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
        ServiceBusAsyncConsumer::receive,
        consumer -> {
            final String linkName = consumer.getLinkName();
            logger.info("{}: Receiving completed. Disposing", linkName);

            final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
            if (removed == null) {
                logger.warning("Could not find consumer to remove for: {}", linkName);
            } else {
                removed.close();
            }
            return Mono.empty();
        });
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
    // Fail fast after close(), consistent with the other receive operations on this client
    // (this overload previously skipped the disposed check its siblings perform).
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
* Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
* on the entity. When a message is received in {@link ReceiveMode
* server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
* processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
* lock is reset to the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
/**
 * Disposes of the consumer by closing the underlying connection to the service.
 */
@Override
public void close() {
    // First close() wins; subsequent calls are no-ops.
    if (isDisposed.getAndSet(true)) {
        return;
    }

    logger.info("Removing receiver links.");
    for (ServiceBusAsyncConsumer consumer : openConsumers.values()) {
        if (consumer != null) {
            consumer.close();
        }
    }
    openConsumers.clear();

    onClientClose.run();
}
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
final Instant lockedUntilUtc = messageLockContainer.getLockTokenExpiration(lockToken);
if (lockedUntilUtc == null) {
logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
return Mono.just(false);
}
final Instant now = Instant.now();
if (lockedUntilUtc.isBefore(now)) {
return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
"Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", lockedUntilUtc, now),
getErrorContext())));
}
return Mono.just(true);
}
/**
 * Moves a locked message to the given terminal state (complete, abandon, defer, dead-letter) on the service.
 *
 * @param message Token identifying the locked message to settle.
 * @param dispositionStatus Terminal state to move the message to.
 * @param deadLetterReason Reason recorded when dead-lettering; {@code null} otherwise.
 * @param deadLetterErrorDescription Error description recorded when dead-lettering; {@code null} otherwise.
 * @param propertiesToModify Message properties to modify; may be {@code null}.
 *
 * @return A {@link Mono} that completes when the disposition has been applied and the local lock removed.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    // Settling requires holding a lock, i.e. PEEK_LOCK mode. (The CoreUtils.isNullOrEmpty branch
    // that used to follow was unreachable — the token was already validated above — and is removed.)
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }

    final UUID lockToken = UUID.fromString(message.getLockToken());
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);

    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        messageLockContainer.remove(lockToken);
    }));
}
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
.repeat();
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
private AmqpErrorContext getErrorContext() {
return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
private final Runnable onClientClose;
/**
* Map containing linkNames and their associated consumers.
* Key: linkName
* Value: consumer associated with that linkName.
*/
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
* Creates a receiver that listens to a Service Bus resource.
* @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
* @param entityPath The name of the topic or queue.
* @param entityType The type of the Service Bus resource.
* @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
* @param receiveOptions Options when receiving messages.
* @param connectionProcessor The AMQP connection to the Service Bus resource.
* @param tracerProvider Tracer for telemetry.
* @param messageSerializer Serializes and deserializes Service Bus messages.
* @param messageLockContainer Container for message locks.
* @param onClientClose Operation to run when the client completes.
*/
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.prefetch = receiveOptions.getPrefetchCount();
this.receiveMode = receiveOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return entityPath;
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    // Delegate to the overload without modifying any message properties.
    final Map<String, Object> noPropertiesToModify = null;
    return abandon(lockToken, noPropertiesToModify);
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
 * This will make the message available again for processing. Abandoning a message will increase the delivery count
 * on the message.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    // Abandoning is a settlement operation; all validation happens in updateDisposition.
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
    // No dead-letter reason/description or property changes apply to completion.
    return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
 * subqueue.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    // Delegate to the overload without modifying any message properties.
    final Map<String, Object> noPropertiesToModify = null;
    return defer(lockToken, noPropertiesToModify);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
 * move message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    // Deferral is a settlement operation; all validation happens in updateDisposition.
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    // Use the shared default options (no reason, description, or property changes).
    return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (deadLetterOptions == null) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    final String reason = deadLetterOptions.getDeadLetterReason();
    final String description = deadLetterOptions.getDeadLetterErrorDescription();
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, reason, description,
        deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call to
 * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
 * message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }
    // Resolve the management node for this entity, then issue the peek through it.
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMap(node -> node.peek());
}
/**
 * Starting from the given sequence number, reads next the active message without changing the state of the receiver
 * or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 * @return A peeked {@link ServiceBusReceivedMessage}.
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
    }
    // Resolve the management node for this entity, then peek from the given sequence number.
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMap(node -> node.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }
    // The contract promises IllegalArgumentException for non-positive counts; enforce it locally
    // instead of relying on a service round-trip to fail.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }
    // The contract promises IllegalArgumentException for non-positive counts; enforce it locally
    // instead of relying on a service round-trip to fail.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
 *
 * <p>The consumer (and its AMQP link) is created lazily on subscription and removed from
 * {@code openConsumers} and closed when the stream terminates.</p>
 *
 * @return A stream of messages from the Service Bus entity.
 */
public Flux<ServiceBusReceivedMessage> receive() {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
// Auto-complete requires PEEK_LOCK: there is no lock to settle in RECEIVE_AND_DELETE mode.
if (receiveMode != ReceiveMode.PEEK_LOCK && receiveOptions.isAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
// usingWhen ties the consumer's lifetime to the subscription: acquired lazily, and the
// cleanup lambda runs when the downstream completes, errors, or cancels.
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
// Remove before closing so a concurrent receive() creates a fresh consumer.
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
}
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
 * sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 * @return A deferred message with the matching {@code sequenceNumber}.
 */
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
    // Guard against use after close, consistent with receiveDeferredMessageBatch and the
    // other receive/peek operations on this client.
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using sequence number.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
    }
    // Resolve the management node, then fan out the deferred messages as a Flux.
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * NOTE(review): this Javadoc documents {@code renewMessageLock(MessageLockToken)}, which is defined
 * elsewhere in this file; it appears detached from its method here.
 *
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
 * on the entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message is locked on the
 * server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
 * processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
 * lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
/**
 * Disposes of the consumer by closing the underlying connection to the service.
 * Idempotent: subsequent calls after the first are no-ops.
 */
@Override
public void close() {
    if (isDisposed.getAndSet(true)) {
        return;
    }
    logger.info("Removing receiver links.");
    // Iterate values directly: ConcurrentHashMap never holds null values, so the previous
    // per-key lookup with a null check was redundant.
    openConsumers.values().forEach(ServiceBusAsyncConsumer::close);
    openConsumers.clear();
    // Notify the owning client so it can release shared resources.
    onClientClose.run();
}
/**
 * Checks that the given lock token is tracked by this receiver and has not expired.
 *
 * @param lockToken Lock token to validate.
 * @return {@code Mono} emitting {@code true} when owned and unexpired, {@code false} when not owned,
 *     or an {@link AmqpException} error when the lock has already expired.
 */
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
    final Instant expiration = messageLockContainer.getLockTokenExpiration(lockToken);
    // An unknown token means the lock container never tracked it for this receiver.
    if (expiration == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }
    final Instant now = Instant.now();
    if (!expiration.isBefore(now)) {
        return Mono.just(true);
    }
    return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", expiration, now),
        getErrorContext())));
}
/**
 * Settles a message (complete, abandon, defer, or dead-letter) through the management node using its lock token.
 *
 * @param message Holder of the lock token identifying the message to settle.
 * @param dispositionStatus The settlement outcome to apply.
 * @param deadLetterReason Reason when dead-lettering; {@code null} otherwise.
 * @param deadLetterErrorDescription Error description when dead-lettering; {@code null} otherwise.
 * @param propertiesToModify Message properties to modify; may be {@code null}.
 * @return A {@link Mono} that completes when the disposition has been applied and the lock released locally.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
    // Validate once; the original repeated the lockToken null/empty checks after the
    // receive-mode check, where they were unreachable duplicates.
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }
    // Settlement requires a lock, which only exists in PEEK_LOCK mode.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }
    final UUID lockToken = UUID.fromString(message.getLockToken());
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);
    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        // Drop the lock from local tracking once the service has accepted the disposition.
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        messageLockContainer.remove(lockToken);
    }));
}
/**
 * Returns the consumer registered for {@code linkName}, creating and caching one atomically if absent.
 * The created receive link repeats on completion so the consumer transparently re-establishes the link.
 */
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
// .repeat() requests a new link each time the previous one terminates.
.repeat();
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));
// The consumer settles/renews via this client's own methods so lock bookkeeping stays centralized.
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
// Builds the AMQP error context (namespace + entity path) attached to lock-related failures.
private AmqpErrorContext getErrorContext() {
return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} |
lockTokenUUID -> lockTokenUuid | public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(lockToken.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (lockToken.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
UUID lockTokenUUID;
try {
lockTokenUUID = UUID.fromString(lockToken.getLockToken());
} catch (IllegalArgumentException ex) {
throw ex;
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(lockTokenUUID))
.map(instant -> {
if (lockToken instanceof ServiceBusReceivedMessage) {
((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
}
return instant;
});
} | serviceBusManagementNode.renewMessageLock(lockTokenUUID)) | public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(lockToken.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (lockToken.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
UUID lockTokenUuid = null;
try {
lockTokenUuid = UUID.fromString(lockToken.getLockToken());
} catch (IllegalArgumentException ex) {
monoError(logger, ex);
}
UUID finalLockTokenUuid = lockTokenUuid;
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(finalLockTokenUuid))
.map(instant -> {
if (lockToken instanceof ServiceBusReceivedMessage) {
((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
}
return instant;
});
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Shared default used by deadLetter(MessageLockToken): no reason/description/property changes.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
// Flipped once in close(); guards every public operation against use after disposal.
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
// Identity of the Service Bus resource this receiver is bound to.
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
// Receive behavior (prefetch, receive mode, auto-complete, lock renewal) captured at construction.
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
// Tracks lock tokens and their expirations for messages received by this client.
private final MessageLockContainer messageLockContainer;
// Invoked from close() so the owning client can release shared resources.
private final Runnable onClientClose;
/**
* Map containing linkNames and their associated consumers.
* Key: linkName
* Value: consumer associated with that linkName.
*/
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * <p>Note: {@code entityType}, {@code messageLockContainer}, and {@code onClientClose} are stored
 * without null checks, unlike the other reference parameters.</p>
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiveOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client completes.
 * @throws NullPointerException if any of the null-checked parameters is null.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
// Cache frequently-used values out of receiveOptions.
this.prefetch = receiveOptions.getPrefetchCount();
this.receiveMode = receiveOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
}
/**
 * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
 * {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace that the connection is associated with.
 */
public String getFullyQualifiedNamespace() {
    return this.fullyQualifiedNamespace;
}
/**
 * Gets the Service Bus resource this client interacts with.
 *
 * @return The Service Bus resource this client interacts with.
 */
public String getEntityPath() {
    return this.entityPath;
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    // Delegate to the overload without modifying any message properties.
    final Map<String, Object> noPropertiesToModify = null;
    return abandon(lockToken, noPropertiesToModify);
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
 * This will make the message available again for processing. Abandoning a message will increase the delivery count
 * on the message.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    // Abandoning is a settlement operation; all validation happens in updateDisposition.
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
    // No dead-letter reason/description or property changes apply to completion.
    return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
 * subqueue.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    // Delegate to the overload without modifying any message properties.
    final Map<String, Object> noPropertiesToModify = null;
    return defer(lockToken, noPropertiesToModify);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
 * move message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    // Deferral is a settlement operation; all validation happens in updateDisposition.
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
 *
 * @param lockToken Lock token of the message.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    // Use the shared default options (no reason, description, or property changes).
    return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (deadLetterOptions == null) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    final String reason = deadLetterOptions.getDeadLetterReason();
    final String description = deadLetterOptions.getDeadLetterErrorDescription();
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, reason, description,
        deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call to
 * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
 * message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }
    // Resolve the management node for this entity, then issue the peek through it.
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMap(node -> node.peek());
}
/**
 * Starting from the given sequence number, reads next the active message without changing the state of the receiver
 * or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 * @return A peeked {@link ServiceBusReceivedMessage}.
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
    }
    // Resolve the management node for this entity, then peek from the given sequence number.
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMap(node -> node.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }
    // The contract promises IllegalArgumentException for non-positive counts; enforce it locally
    // instead of relying on a service round-trip to fail.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }
    // The contract promises IllegalArgumentException for non-positive counts; enforce it locally
    // instead of relying on a service round-trip to fail.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
 *
 * <p>The consumer (and its AMQP link) is created lazily on subscription and removed from
 * {@code openConsumers} and closed when the stream terminates.</p>
 *
 * @return A stream of messages from the Service Bus entity.
 */
public Flux<ServiceBusReceivedMessage> receive() {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
// Auto-complete requires PEEK_LOCK: there is no lock to settle in RECEIVE_AND_DELETE mode.
if (receiveMode != ReceiveMode.PEEK_LOCK && receiveOptions.isAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
// usingWhen ties the consumer's lifetime to the subscription: acquired lazily, and the
// cleanup lambda runs when the downstream completes, errors, or cancels.
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
// Remove before closing so a concurrent receive() creates a fresh consumer.
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
}
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
 * sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 * @return A deferred message with the matching {@code sequenceNumber}.
 */
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
    // Guard against use after close, consistent with receiveDeferredMessageBatch and the
    // other receive/peek operations on this client.
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using sequence number.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
    }
    // Resolve the management node, then fan out the deferred messages as a Flux.
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
* Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
* on the entity. When a message is received in {@link ReceiveMode
* server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
* processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
* lock is reset to the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
/**
 * Disposes of the consumer by closing the underlying connection to the service.
 *
 * <p>Idempotent: only the first invocation closes the open consumers and runs the
 * {@code onClientClose} callback; subsequent calls return immediately.</p>
 */
@Override
public void close() {
    if (isDisposed.getAndSet(true)) {
        return;
    }
    logger.info("Removing receiver links.");
    for (ServiceBusAsyncConsumer consumer : openConsumers.values()) {
        if (consumer != null) {
            consumer.close();
        }
    }
    openConsumers.clear();
    onClientClose.run();
}
/**
 * Checks whether this receiver holds an unexpired lock for the given token.
 *
 * @param lockToken Lock token to validate.
 *
 * @return {@code Mono} of {@code false} when the token is not tracked by this receiver; an error
 *     when the tracked lock has already expired; {@code true} otherwise.
 */
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
    final Instant expiration = messageLockContainer.getLockTokenExpiration(lockToken);
    if (expiration == null) {
        // Unknown token: this receiver never locked the message (or the entry was removed).
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }
    final Instant now = Instant.now();
    if (!expiration.isBefore(now)) {
        return Mono.just(true);
    }
    return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", expiration, now),
        getErrorContext())));
}
/**
 * Updates the disposition (complete / abandon / defer / dead-letter) of a received message.
 *
 * @param message Token for the message whose disposition changes; must carry a non-empty lock token.
 * @param dispositionStatus The disposition to apply.
 * @param deadLetterReason Reason when dead-lettering, otherwise {@code null}.
 * @param deadLetterErrorDescription Error description when dead-lettering, otherwise {@code null}.
 * @param propertiesToModify Message properties to modify, otherwise {@code null}.
 *
 * @return A {@link Mono} that completes when the service acknowledges the disposition change and
 *     the lock token has been removed from the local lock container.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }
    // Dispositions require a message lock, which only exists in PEEK_LOCK mode.
    // NOTE: the lock token was already validated non-null and non-empty above, so the former
    // CoreUtils.isNullOrEmpty re-check here was unreachable dead code and has been removed.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }
    final UUID lockToken = UUID.fromString(message.getLockToken());
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);
    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        // Success: stop tracking the lock locally.
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        messageLockContainer.remove(lockToken);
    }));
}
/**
 * Returns the consumer registered under {@code linkName}, creating and caching one if absent.
 *
 * <p>The creation function wires a repeating receive-link source through a
 * {@link ServiceBusReceiveLinkProcessor} so that the consumer survives link recreation.</p>
 *
 * @param linkName Name the consumer is keyed by in {@code openConsumers}.
 * @return The cached or newly created consumer for {@code linkName}.
 */
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
    return openConsumers.computeIfAbsent(linkName, name -> {
        logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
        final Flux<AmqpReceiveLink> receiveLink =
            connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
                receiveMode, isSessionEnabled, null, entityType))
                .doOnNext(next -> {
                    final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
                        + " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
                    logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
                        entityType);
                })
                // repeat() resubscribes when the link source completes, recreating the AMQP link.
                .repeat();
        final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
        final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
            new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));
        // Settlement callbacks (complete/abandon/renew) delegate back to this client.
        return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
            receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
            receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(), messageLockContainer,
            this::complete, this::abandon, this::renewMessageLock);
    });
}
/**
 * Builds the AMQP error context for this receiver's namespace and entity path.
 *
 * @return A {@link SessionErrorContext} describing this receiver.
 */
private AmqpErrorContext getErrorContext() {
    final String namespace = getFullyQualifiedNamespace();
    final String entity = getEntityPath();
    return new SessionErrorContext(namespace, entity);
}
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
private final Runnable onClientClose;
/**
* Map containing linkNames and their associated consumers.
* Key: linkName
* Value: consumer associated with that linkName.
*/
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
* Creates a receiver that listens to a Service Bus resource.
* @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
* @param entityPath The name of the topic or queue.
* @param entityType The type of the Service Bus resource.
* @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
* @param receiveOptions Options when receiving messages.
* @param connectionProcessor The AMQP connection to the Service Bus resource.
* @param tracerProvider Tracer for telemetry.
* @param messageSerializer Serializes and deserializes Service Bus messages.
* @param messageLockContainer Container for message locks.
* @param onClientClose Operation to run when the client completes.
*/
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.prefetch = receiveOptions.getPrefetchCount();
this.receiveMode = receiveOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return entityPath;
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken) {
return abandon(lockToken, null);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> complete(MessageLockToken lockToken) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken) {
return defer(lockToken, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(MessageLockToken lockToken) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
if (Objects.isNull(deadLetterOptions)) {
return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peek() {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(ServiceBusManagementNode::peek);
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.peek(sequenceNumber));
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
* Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
*
* @return A stream of messages from the Service Bus entity.
*/
public Flux<ServiceBusReceivedMessage> receive() {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
if (receiveMode != ReceiveMode.PEEK_LOCK && receiveOptions.isAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be
 * received by using their sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 *
 * @return A deferred message with the matching {@code sequenceNumber}.
 */
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
    // Guard against use after close(), consistent with the other receive/peek operations.
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
* Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
* on the entity. When a message is received in {@link ReceiveMode
* server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
* processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
* lock is reset to the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
/**
* Disposes of the consumer by closing the underlying connection to the service.
*/
@Override
public void close() {
if (isDisposed.getAndSet(true)) {
return;
}
logger.info("Removing receiver links.");
openConsumers.keySet().forEach(key -> {
final ServiceBusAsyncConsumer consumer = openConsumers.get(key);
if (consumer != null) {
consumer.close();
}
});
openConsumers.clear();
onClientClose.run();
}
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
final Instant lockedUntilUtc = messageLockContainer.getLockTokenExpiration(lockToken);
if (lockedUntilUtc == null) {
logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
return Mono.just(false);
}
final Instant now = Instant.now();
if (lockedUntilUtc.isBefore(now)) {
return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
"Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", lockedUntilUtc, now),
getErrorContext())));
}
return Mono.just(true);
}
/**
 * Updates the disposition (complete / abandon / defer / dead-letter) of a received message.
 *
 * @param message Token for the message whose disposition changes; must carry a non-empty lock token.
 * @param dispositionStatus The disposition to apply.
 * @param deadLetterReason Reason when dead-lettering, otherwise {@code null}.
 * @param deadLetterErrorDescription Error description when dead-lettering, otherwise {@code null}.
 * @param propertiesToModify Message properties to modify, otherwise {@code null}.
 *
 * @return A {@link Mono} that completes when the service acknowledges the disposition change and
 *     the lock token has been removed from the local lock container.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }
    // Dispositions require a message lock, which only exists in PEEK_LOCK mode.
    // NOTE: the former second round of lockToken null/empty checks here duplicated the
    // validation above verbatim and was unreachable; it has been removed.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }
    final UUID lockToken = UUID.fromString(message.getLockToken());
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);
    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        // Success: stop tracking the lock locally.
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        messageLockContainer.remove(lockToken);
    }));
}
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
.repeat();
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
private AmqpErrorContext getErrorContext() {
return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} |
Why set both? Will it affect line 413 when you call both regardless create or update? | public DnsZoneImpl withETagCheck() {
this.eTagState.withImplicitETagCheckOnCreate();
this.eTagState.withImplicitETagCheckOnUpdate();
return this;
} | this.eTagState.withImplicitETagCheckOnUpdate(); | public DnsZoneImpl withETagCheck() {
this.eTagState.withImplicitETagCheckOnCreateOrUpdate(this.isInCreateMode());
return this;
} | class DnsZoneImpl
extends GroupableResourceImpl<
DnsZone,
ZoneInner,
DnsZoneImpl,
DnsZoneManager>
implements
DnsZone,
DnsZone.Definition,
DnsZone.Update {
private ARecordSets aRecordSets;
private AaaaRecordSets aaaaRecordSets;
private CaaRecordSets caaRecordSets;
private CNameRecordSets cnameRecordSets;
private MXRecordSets mxRecordSets;
private NSRecordSets nsRecordSets;
private PtrRecordSets ptrRecordSets;
private SrvRecordSets srvRecordSets;
private TxtRecordSets txtRecordSets;
private DnsRecordSetsImpl recordSets;
private final ETagState eTagState = new ETagState();
/**
 * Creates a DnsZoneImpl wrapping the given inner model.
 *
 * @param name the zone name
 * @param innerModel the inner resource model
 * @param manager the DNS zone manager
 */
DnsZoneImpl(String name, final ZoneInner innerModel, final DnsZoneManager manager) {
    super(name, innerModel, manager);
    this.recordSets = new DnsRecordSetsImpl(this);
    initRecordSets();
    // New zones default to the public access type; existing zones keep their server-side value.
    if (isInCreateMode()) {
        this.inner().withZoneType(ZoneType.PUBLIC);
    }
}
@Override
public long maxNumberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().maxNumberOfRecordSets());
}
@Override
public long numberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().numberOfRecordSets());
}
@Override
public String eTag() {
return this.inner().etag();
}
@Override
public ZoneType accessType() {
return this.inner().zoneType();
}
/**
 * Lists the IDs of the virtual networks registered with this zone.
 *
 * @return resource IDs of the registration virtual networks; empty when none are configured
 */
@Override
public List<String> registrationVirtualNetworkIds() {
    final List<String> ids = new ArrayList<>();
    final List<SubResource> networks = this.inner().registrationVirtualNetworks();
    if (networks != null) {
        for (SubResource network : networks) {
            ids.add(network.getId());
        }
    }
    return ids;
}
/**
 * Lists the IDs of the virtual networks that resolve records through this zone.
 *
 * @return resource IDs of the resolution virtual networks; empty when none are configured
 */
@Override
public List<String> resolutionVirtualNetworkIds() {
    final List<String> ids = new ArrayList<>();
    final List<SubResource> networks = this.inner().resolutionVirtualNetworks();
    if (networks != null) {
        for (SubResource network : networks) {
            ids.add(network.getId());
        }
    }
    return ids;
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets() {
return this.listRecordSetsIntern(null, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix) {
return this.listRecordSetsIntern(recordSetNameSuffix, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(int pageSize) {
return this.listRecordSetsIntern(null, pageSize);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix, int pageSize) {
return this.listRecordSetsIntern(recordSetNameSuffix, pageSize);
}
@Override
public List<String> nameServers() {
if (this.inner() == null) {
return new ArrayList<>();
}
return this.inner().nameServers();
}
@Override
public ARecordSets aRecordSets() {
return this.aRecordSets;
}
@Override
public AaaaRecordSets aaaaRecordSets() {
return this.aaaaRecordSets;
}
@Override
public CaaRecordSets caaRecordSets() {
return this.caaRecordSets;
}
@Override
public CNameRecordSets cNameRecordSets() {
return this.cnameRecordSets;
}
@Override
public MXRecordSets mxRecordSets() {
return this.mxRecordSets;
}
@Override
public NSRecordSets nsRecordSets() {
return this.nsRecordSets;
}
@Override
public PtrRecordSets ptrRecordSets() {
return this.ptrRecordSets;
}
@Override
public SrvRecordSets srvRecordSets() {
return this.srvRecordSets;
}
@Override
public TxtRecordSets txtRecordSets() {
return this.txtRecordSets;
}
@Override
public SoaRecordSet getSoaRecordSet() {
RecordSetInner inner = this.manager().inner().recordSets().get(this.resourceGroupName(), this.name(), "@", RecordType.SOA);
if (inner == null) {
return null;
}
return new SoaRecordSetImpl(inner.getName(), this, inner);
}
@Override
public DnsRecordSetImpl defineARecordSet(String name) {
return recordSets.defineARecordSet(name);
}
@Override
public DnsRecordSetImpl defineAaaaRecordSet(String name) {
return recordSets.defineAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl defineCaaRecordSet(String name) {
return recordSets.defineCaaRecordSet(name);
}
@Override
public DnsZoneImpl withCNameRecordSet(String name, String alias) {
recordSets.withCNameRecordSet(name, alias);
return this;
}
@Override
public DnsRecordSetImpl defineCNameRecordSet(String name) {
return recordSets.defineCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl defineMXRecordSet(String name) {
return recordSets.defineMXRecordSet(name);
}
@Override
public DnsRecordSetImpl defineNSRecordSet(String name) {
return recordSets.defineNSRecordSet(name);
}
@Override
public DnsRecordSetImpl definePtrRecordSet(String name) {
return recordSets.definePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl defineSrvRecordSet(String name) {
return recordSets.defineSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl defineTxtRecordSet(String name) {
return recordSets.defineTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateARecordSet(String name) {
return recordSets.updateARecordSet(name);
}
@Override
public DnsRecordSetImpl updateAaaaRecordSet(String name) {
return recordSets.updateAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCaaRecordSet(String name) {
return recordSets.updateCaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateMXRecordSet(String name) {
return recordSets.updateMXRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCNameRecordSet(String name) {
return recordSets.updateCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl updateNSRecordSet(String name) {
return recordSets.updateNSRecordSet(name);
}
@Override
public DnsRecordSetImpl updatePtrRecordSet(String name) {
return recordSets.updatePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSrvRecordSet(String name) {
return recordSets.updateSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl updateTxtRecordSet(String name) {
return recordSets.updateTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSoaRecord() {
return recordSets.updateSoaRecordSet();
}
@Override
public DnsZoneImpl withoutARecordSet(String name) {
return this.withoutARecordSet(name, null);
}
@Override
public DnsZoneImpl withoutARecordSet(String name, String eTag) {
recordSets.withoutARecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name) {
return this.withoutAaaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name, String eTag) {
recordSets.withoutAaaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name) {
return this.withoutCaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name, String eTag) {
recordSets.withoutCaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name) {
return this.withoutCNameRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name, String eTag) {
recordSets.withoutCNameRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name) {
return this.withoutMXRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name, String eTag) {
recordSets.withoutMXRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name) {
return this.withoutNSRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name, String eTag) {
recordSets.withoutNSRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name) {
return this.withoutPtrRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name, String eTag) {
recordSets.withoutPtrRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name) {
return this.withoutSrvRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name, String eTag) {
recordSets.withoutSrvRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name) {
return this.withoutTxtRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name, String eTag) {
recordSets.withoutTxtRecordSet(name, eTag);
return this;
}
@Override
@Override
public DnsZoneImpl withETagCheck(String eTagValue) {
this.eTagState.withExplicitETagCheckOnUpdate(eTagValue);
return this;
}
@Override
public Mono<DnsZone> createResourceAsync() {
return Mono.just(this)
.flatMap(self -> self.manager().inner().zones().createOrUpdateAsync(self.resourceGroupName(),
self.name(), self.inner(), eTagState.ifMatchValueOnUpdate(self.inner().etag()), eTagState.ifNonMatchValueOnCreate()))
.map(innerToFluentMap(this))
.map(dnsZone -> {
this.eTagState.clear();
return dnsZone;
});
}
@Override
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) {
return Mono.just(true)
.map(ignored -> {
recordSets.clear();
return ignored;
})
.then();
}
@Override
public Mono<DnsZone> refreshAsync() {
return super.refreshAsync()
.map(dnsZone -> {
DnsZoneImpl impl = (DnsZoneImpl) dnsZone;
impl.initRecordSets();
return impl;
});
}
@Override
protected Mono<ZoneInner> getInnerAsync() {
return this.manager().inner().zones().getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
private void initRecordSets() {
this.aRecordSets = new ARecordSetsImpl(this);
this.aaaaRecordSets = new AaaaRecordSetsImpl(this);
this.caaRecordSets = new CaaRecordSetsImpl(this);
this.cnameRecordSets = new CNameRecordSetsImpl(this);
this.mxRecordSets = new MXRecordSetsImpl(this);
this.nsRecordSets = new NSRecordSetsImpl(this);
this.ptrRecordSets = new PtrRecordSetsImpl(this);
this.srvRecordSets = new SrvRecordSetsImpl(this);
this.txtRecordSets = new TxtRecordSetsImpl(this);
this.recordSets.clear();
}
private PagedIterable<DnsRecordSet> listRecordSetsIntern(String recordSetSuffix, Integer pageSize) {
final DnsZoneImpl self = this;
PagedFlux<DnsRecordSet> recordSets = PagedConverter.flatMapPage(
this.manager().inner().recordSets().listByDnsZoneAsync(this.resourceGroupName(), this.name(), pageSize, recordSetSuffix),
inner -> {
DnsRecordSet recordSet = new DnsRecordSetImpl(inner.getName(), inner.getType(), self, inner);
switch (recordSet.recordType()) {
case A:
return Mono.just(new ARecordSetImpl(inner.getName(), self, inner));
case AAAA:
return Mono.just(new AaaaRecordSetImpl(inner.getName(), self, inner));
case CAA:
return Mono.just(new CaaRecordSetImpl(inner.getName(), self, inner));
case CNAME:
return Mono.just(new CNameRecordSetImpl(inner.getName(), self, inner));
case MX:
return Mono.just(new MXRecordSetImpl(inner.getName(), self, inner));
case NS:
return Mono.just(new NSRecordSetImpl(inner.getName(), self, inner));
case PTR:
return Mono.just(new PtrRecordSetImpl(inner.getName(), self, inner));
case SOA:
return Mono.just(new SoaRecordSetImpl(inner.getName(), self, inner));
case SRV:
return Mono.just(new SrvRecordSetImpl(inner.getName(), self, inner));
case TXT:
return Mono.just(new TxtRecordSetImpl(inner.getName(), self, inner));
default:
return Mono.just(recordSet);
}
});
return new PagedIterable<>(recordSets);
}
@Override
public DnsZoneImpl withPublicAccess() {
this.inner().withZoneType(ZoneType.PUBLIC);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess() {
this.inner().withZoneType(ZoneType.PRIVATE);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess(List<String> registrationVirtualNetworkIds, List<String> resolutionVirtualNetworkIds) {
this.withPrivateAccess();
this.inner().withRegistrationVirtualNetworks(new ArrayList<>());
this.inner().withResolutionVirtualNetworks(new ArrayList<>());
for (String rvnId : registrationVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().registrationVirtualNetworks().add(sb);
}
for (String rvnId : resolutionVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().resolutionVirtualNetworks().add(sb);
}
return this;
}
} | class DnsZoneImpl
extends GroupableResourceImpl<
DnsZone,
ZoneInner,
DnsZoneImpl,
DnsZoneManager>
implements
DnsZone,
DnsZone.Definition,
DnsZone.Update {
private ARecordSets aRecordSets;
private AaaaRecordSets aaaaRecordSets;
private CaaRecordSets caaRecordSets;
private CNameRecordSets cnameRecordSets;
private MXRecordSets mxRecordSets;
private NSRecordSets nsRecordSets;
private PtrRecordSets ptrRecordSets;
private SrvRecordSets srvRecordSets;
private TxtRecordSets txtRecordSets;
private DnsRecordSetsImpl recordSets;
private final ETagState eTagState = new ETagState();
DnsZoneImpl(String name, final ZoneInner innerModel, final DnsZoneManager manager) {
super(name, innerModel, manager);
this.recordSets = new DnsRecordSetsImpl(this);
initRecordSets();
if (isInCreateMode()) {
this.inner().withZoneType(ZoneType.PUBLIC);
}
}
@Override
public long maxNumberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().maxNumberOfRecordSets());
}
@Override
public long numberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().numberOfRecordSets());
}
@Override
public String eTag() {
return this.inner().etag();
}
@Override
public ZoneType accessType() {
return this.inner().zoneType();
}
@Override
public List<String> registrationVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().registrationVirtualNetworks() != null) {
for (SubResource sb : this.inner().registrationVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public List<String> resolutionVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().resolutionVirtualNetworks() != null) {
for (SubResource sb : this.inner().resolutionVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets() {
return this.listRecordSetsIntern(null, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix) {
return this.listRecordSetsIntern(recordSetNameSuffix, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(int pageSize) {
return this.listRecordSetsIntern(null, pageSize);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix, int pageSize) {
return this.listRecordSetsIntern(recordSetNameSuffix, pageSize);
}
@Override
public List<String> nameServers() {
if (this.inner() == null) {
return new ArrayList<>();
}
return this.inner().nameServers();
}
@Override
public ARecordSets aRecordSets() {
return this.aRecordSets;
}
@Override
public AaaaRecordSets aaaaRecordSets() {
return this.aaaaRecordSets;
}
@Override
public CaaRecordSets caaRecordSets() {
return this.caaRecordSets;
}
@Override
public CNameRecordSets cNameRecordSets() {
return this.cnameRecordSets;
}
@Override
public MXRecordSets mxRecordSets() {
return this.mxRecordSets;
}
@Override
public NSRecordSets nsRecordSets() {
return this.nsRecordSets;
}
@Override
public PtrRecordSets ptrRecordSets() {
return this.ptrRecordSets;
}
@Override
public SrvRecordSets srvRecordSets() {
return this.srvRecordSets;
}
@Override
public TxtRecordSets txtRecordSets() {
return this.txtRecordSets;
}
@Override
public SoaRecordSet getSoaRecordSet() {
RecordSetInner inner = this.manager().inner().recordSets().get(this.resourceGroupName(), this.name(), "@", RecordType.SOA);
if (inner == null) {
return null;
}
return new SoaRecordSetImpl(inner.getName(), this, inner);
}
@Override
public DnsRecordSetImpl defineARecordSet(String name) {
return recordSets.defineARecordSet(name);
}
@Override
public DnsRecordSetImpl defineAaaaRecordSet(String name) {
return recordSets.defineAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl defineCaaRecordSet(String name) {
return recordSets.defineCaaRecordSet(name);
}
@Override
public DnsZoneImpl withCNameRecordSet(String name, String alias) {
recordSets.withCNameRecordSet(name, alias);
return this;
}
@Override
public DnsRecordSetImpl defineCNameRecordSet(String name) {
return recordSets.defineCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl defineMXRecordSet(String name) {
return recordSets.defineMXRecordSet(name);
}
@Override
public DnsRecordSetImpl defineNSRecordSet(String name) {
return recordSets.defineNSRecordSet(name);
}
@Override
public DnsRecordSetImpl definePtrRecordSet(String name) {
return recordSets.definePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl defineSrvRecordSet(String name) {
return recordSets.defineSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl defineTxtRecordSet(String name) {
return recordSets.defineTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateARecordSet(String name) {
return recordSets.updateARecordSet(name);
}
@Override
public DnsRecordSetImpl updateAaaaRecordSet(String name) {
return recordSets.updateAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCaaRecordSet(String name) {
return recordSets.updateCaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateMXRecordSet(String name) {
return recordSets.updateMXRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCNameRecordSet(String name) {
return recordSets.updateCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl updateNSRecordSet(String name) {
return recordSets.updateNSRecordSet(name);
}
@Override
public DnsRecordSetImpl updatePtrRecordSet(String name) {
return recordSets.updatePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSrvRecordSet(String name) {
return recordSets.updateSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl updateTxtRecordSet(String name) {
return recordSets.updateTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSoaRecord() {
return recordSets.updateSoaRecordSet();
}
@Override
public DnsZoneImpl withoutARecordSet(String name) {
return this.withoutARecordSet(name, null);
}
@Override
public DnsZoneImpl withoutARecordSet(String name, String eTag) {
recordSets.withoutARecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name) {
return this.withoutAaaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name, String eTag) {
recordSets.withoutAaaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name) {
return this.withoutCaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name, String eTag) {
recordSets.withoutCaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name) {
return this.withoutCNameRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name, String eTag) {
recordSets.withoutCNameRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name) {
return this.withoutMXRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name, String eTag) {
recordSets.withoutMXRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name) {
return this.withoutNSRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name, String eTag) {
recordSets.withoutNSRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name) {
return this.withoutPtrRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name, String eTag) {
recordSets.withoutPtrRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name) {
return this.withoutSrvRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name, String eTag) {
recordSets.withoutSrvRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name) {
return this.withoutTxtRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name, String eTag) {
recordSets.withoutTxtRecordSet(name, eTag);
return this;
}
@Override
@Override
public DnsZoneImpl withETagCheck(String eTagValue) {
this.eTagState.withExplicitETagCheckOnUpdate(eTagValue);
return this;
}
@Override
public Mono<DnsZone> createResourceAsync() {
return Mono.just(this)
.flatMap(self -> self.manager().inner().zones().createOrUpdateAsync(self.resourceGroupName(),
self.name(), self.inner(), eTagState.ifMatchValueOnUpdate(self.inner().etag()), eTagState.ifNonMatchValueOnCreate()))
.map(innerToFluentMap(this))
.map(dnsZone -> {
this.eTagState.clear();
return dnsZone;
});
}
@Override
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) {
return Mono.just(true)
.map(ignored -> {
recordSets.clear();
return ignored;
})
.then();
}
@Override
public Mono<DnsZone> refreshAsync() {
return super.refreshAsync()
.map(dnsZone -> {
DnsZoneImpl impl = (DnsZoneImpl) dnsZone;
impl.initRecordSets();
return impl;
});
}
@Override
protected Mono<ZoneInner> getInnerAsync() {
return this.manager().inner().zones().getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
private void initRecordSets() {
this.aRecordSets = new ARecordSetsImpl(this);
this.aaaaRecordSets = new AaaaRecordSetsImpl(this);
this.caaRecordSets = new CaaRecordSetsImpl(this);
this.cnameRecordSets = new CNameRecordSetsImpl(this);
this.mxRecordSets = new MXRecordSetsImpl(this);
this.nsRecordSets = new NSRecordSetsImpl(this);
this.ptrRecordSets = new PtrRecordSetsImpl(this);
this.srvRecordSets = new SrvRecordSetsImpl(this);
this.txtRecordSets = new TxtRecordSetsImpl(this);
this.recordSets.clear();
}
private PagedIterable<DnsRecordSet> listRecordSetsIntern(String recordSetSuffix, Integer pageSize) {
final DnsZoneImpl self = this;
PagedFlux<DnsRecordSet> recordSets = PagedConverter.flatMapPage(
this.manager().inner().recordSets().listByDnsZoneAsync(this.resourceGroupName(), this.name(), pageSize, recordSetSuffix),
inner -> {
DnsRecordSet recordSet = new DnsRecordSetImpl(inner.getName(), inner.getType(), self, inner);
switch (recordSet.recordType()) {
case A:
return Mono.just(new ARecordSetImpl(inner.getName(), self, inner));
case AAAA:
return Mono.just(new AaaaRecordSetImpl(inner.getName(), self, inner));
case CAA:
return Mono.just(new CaaRecordSetImpl(inner.getName(), self, inner));
case CNAME:
return Mono.just(new CNameRecordSetImpl(inner.getName(), self, inner));
case MX:
return Mono.just(new MXRecordSetImpl(inner.getName(), self, inner));
case NS:
return Mono.just(new NSRecordSetImpl(inner.getName(), self, inner));
case PTR:
return Mono.just(new PtrRecordSetImpl(inner.getName(), self, inner));
case SOA:
return Mono.just(new SoaRecordSetImpl(inner.getName(), self, inner));
case SRV:
return Mono.just(new SrvRecordSetImpl(inner.getName(), self, inner));
case TXT:
return Mono.just(new TxtRecordSetImpl(inner.getName(), self, inner));
default:
return Mono.just(recordSet);
}
});
return new PagedIterable<>(recordSets);
}
@Override
public DnsZoneImpl withPublicAccess() {
this.inner().withZoneType(ZoneType.PUBLIC);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess() {
this.inner().withZoneType(ZoneType.PRIVATE);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess(List<String> registrationVirtualNetworkIds, List<String> resolutionVirtualNetworkIds) {
this.withPrivateAccess();
this.inner().withRegistrationVirtualNetworks(new ArrayList<>());
this.inner().withResolutionVirtualNetworks(new ArrayList<>());
for (String rvnId : registrationVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().registrationVirtualNetworks().add(sb);
}
for (String rvnId : resolutionVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().resolutionVirtualNetworks().add(sb);
}
return this;
}
} |
`withETagCheck()` could be available both Create phase and Update phase. So it's required to set both here. In Create phase, line#413 `self.inner().etag()` is NULL so it will send `If-Match: null` and `If-None-Match:*` during creation. In Update phase, it would send `If-Match: etag` and `If-None-Match: *`. This is the scenario applied by RecordSet in track 1. I tested it's working when sending `If-Match: etag` and `If-None-Match: *` for Update. | public DnsZoneImpl withETagCheck() {
this.eTagState.withImplicitETagCheckOnCreate();
this.eTagState.withImplicitETagCheckOnUpdate();
return this;
} | this.eTagState.withImplicitETagCheckOnUpdate(); | public DnsZoneImpl withETagCheck() {
this.eTagState.withImplicitETagCheckOnCreateOrUpdate(this.isInCreateMode());
return this;
} | class DnsZoneImpl
extends GroupableResourceImpl<
DnsZone,
ZoneInner,
DnsZoneImpl,
DnsZoneManager>
implements
DnsZone,
DnsZone.Definition,
DnsZone.Update {
private ARecordSets aRecordSets;
private AaaaRecordSets aaaaRecordSets;
private CaaRecordSets caaRecordSets;
private CNameRecordSets cnameRecordSets;
private MXRecordSets mxRecordSets;
private NSRecordSets nsRecordSets;
private PtrRecordSets ptrRecordSets;
private SrvRecordSets srvRecordSets;
private TxtRecordSets txtRecordSets;
private DnsRecordSetsImpl recordSets;
private final ETagState eTagState = new ETagState();
DnsZoneImpl(String name, final ZoneInner innerModel, final DnsZoneManager manager) {
super(name, innerModel, manager);
this.recordSets = new DnsRecordSetsImpl(this);
initRecordSets();
if (isInCreateMode()) {
this.inner().withZoneType(ZoneType.PUBLIC);
}
}
@Override
public long maxNumberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().maxNumberOfRecordSets());
}
@Override
public long numberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().numberOfRecordSets());
}
@Override
public String eTag() {
return this.inner().etag();
}
@Override
public ZoneType accessType() {
return this.inner().zoneType();
}
@Override
public List<String> registrationVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().registrationVirtualNetworks() != null) {
for (SubResource sb : this.inner().registrationVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public List<String> resolutionVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().resolutionVirtualNetworks() != null) {
for (SubResource sb : this.inner().resolutionVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets() {
return this.listRecordSetsIntern(null, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix) {
return this.listRecordSetsIntern(recordSetNameSuffix, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(int pageSize) {
return this.listRecordSetsIntern(null, pageSize);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix, int pageSize) {
return this.listRecordSetsIntern(recordSetNameSuffix, pageSize);
}
@Override
public List<String> nameServers() {
if (this.inner() == null) {
return new ArrayList<>();
}
return this.inner().nameServers();
}
@Override
public ARecordSets aRecordSets() {
return this.aRecordSets;
}
@Override
public AaaaRecordSets aaaaRecordSets() {
return this.aaaaRecordSets;
}
@Override
public CaaRecordSets caaRecordSets() {
return this.caaRecordSets;
}
@Override
public CNameRecordSets cNameRecordSets() {
return this.cnameRecordSets;
}
@Override
public MXRecordSets mxRecordSets() {
return this.mxRecordSets;
}
@Override
public NSRecordSets nsRecordSets() {
return this.nsRecordSets;
}
@Override
public PtrRecordSets ptrRecordSets() {
return this.ptrRecordSets;
}
@Override
public SrvRecordSets srvRecordSets() {
return this.srvRecordSets;
}
@Override
public TxtRecordSets txtRecordSets() {
return this.txtRecordSets;
}
@Override
public SoaRecordSet getSoaRecordSet() {
RecordSetInner inner = this.manager().inner().recordSets().get(this.resourceGroupName(), this.name(), "@", RecordType.SOA);
if (inner == null) {
return null;
}
return new SoaRecordSetImpl(inner.getName(), this, inner);
}
@Override
public DnsRecordSetImpl defineARecordSet(String name) {
return recordSets.defineARecordSet(name);
}
@Override
public DnsRecordSetImpl defineAaaaRecordSet(String name) {
return recordSets.defineAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl defineCaaRecordSet(String name) {
return recordSets.defineCaaRecordSet(name);
}
@Override
public DnsZoneImpl withCNameRecordSet(String name, String alias) {
recordSets.withCNameRecordSet(name, alias);
return this;
}
@Override
public DnsRecordSetImpl defineCNameRecordSet(String name) {
return recordSets.defineCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl defineMXRecordSet(String name) {
return recordSets.defineMXRecordSet(name);
}
@Override
public DnsRecordSetImpl defineNSRecordSet(String name) {
return recordSets.defineNSRecordSet(name);
}
@Override
public DnsRecordSetImpl definePtrRecordSet(String name) {
return recordSets.definePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl defineSrvRecordSet(String name) {
return recordSets.defineSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl defineTxtRecordSet(String name) {
return recordSets.defineTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateARecordSet(String name) {
return recordSets.updateARecordSet(name);
}
@Override
public DnsRecordSetImpl updateAaaaRecordSet(String name) {
return recordSets.updateAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCaaRecordSet(String name) {
return recordSets.updateCaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateMXRecordSet(String name) {
return recordSets.updateMXRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCNameRecordSet(String name) {
return recordSets.updateCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl updateNSRecordSet(String name) {
return recordSets.updateNSRecordSet(name);
}
@Override
public DnsRecordSetImpl updatePtrRecordSet(String name) {
return recordSets.updatePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSrvRecordSet(String name) {
return recordSets.updateSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl updateTxtRecordSet(String name) {
return recordSets.updateTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSoaRecord() {
return recordSets.updateSoaRecordSet();
}
@Override
public DnsZoneImpl withoutARecordSet(String name) {
return this.withoutARecordSet(name, null);
}
@Override
public DnsZoneImpl withoutARecordSet(String name, String eTag) {
recordSets.withoutARecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name) {
return this.withoutAaaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name, String eTag) {
recordSets.withoutAaaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name) {
return this.withoutCaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name, String eTag) {
recordSets.withoutCaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name) {
return this.withoutCNameRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name, String eTag) {
recordSets.withoutCNameRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name) {
return this.withoutMXRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name, String eTag) {
recordSets.withoutMXRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name) {
return this.withoutNSRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name, String eTag) {
recordSets.withoutNSRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name) {
return this.withoutPtrRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name, String eTag) {
recordSets.withoutPtrRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name) {
return this.withoutSrvRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name, String eTag) {
recordSets.withoutSrvRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name) {
return this.withoutTxtRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name, String eTag) {
recordSets.withoutTxtRecordSet(name, eTag);
return this;
}
@Override
@Override
public DnsZoneImpl withETagCheck(String eTagValue) {
this.eTagState.withExplicitETagCheckOnUpdate(eTagValue);
return this;
}
@Override
public Mono<DnsZone> createResourceAsync() {
return Mono.just(this)
.flatMap(self -> self.manager().inner().zones().createOrUpdateAsync(self.resourceGroupName(),
self.name(), self.inner(), eTagState.ifMatchValueOnUpdate(self.inner().etag()), eTagState.ifNonMatchValueOnCreate()))
.map(innerToFluentMap(this))
.map(dnsZone -> {
this.eTagState.clear();
return dnsZone;
});
}
@Override
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) {
return Mono.just(true)
.map(ignored -> {
recordSets.clear();
return ignored;
})
.then();
}
@Override
public Mono<DnsZone> refreshAsync() {
return super.refreshAsync()
.map(dnsZone -> {
DnsZoneImpl impl = (DnsZoneImpl) dnsZone;
impl.initRecordSets();
return impl;
});
}
@Override
protected Mono<ZoneInner> getInnerAsync() {
return this.manager().inner().zones().getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
private void initRecordSets() {
this.aRecordSets = new ARecordSetsImpl(this);
this.aaaaRecordSets = new AaaaRecordSetsImpl(this);
this.caaRecordSets = new CaaRecordSetsImpl(this);
this.cnameRecordSets = new CNameRecordSetsImpl(this);
this.mxRecordSets = new MXRecordSetsImpl(this);
this.nsRecordSets = new NSRecordSetsImpl(this);
this.ptrRecordSets = new PtrRecordSetsImpl(this);
this.srvRecordSets = new SrvRecordSetsImpl(this);
this.txtRecordSets = new TxtRecordSetsImpl(this);
this.recordSets.clear();
}
private PagedIterable<DnsRecordSet> listRecordSetsIntern(String recordSetSuffix, Integer pageSize) {
final DnsZoneImpl self = this;
PagedFlux<DnsRecordSet> recordSets = PagedConverter.flatMapPage(
this.manager().inner().recordSets().listByDnsZoneAsync(this.resourceGroupName(), this.name(), pageSize, recordSetSuffix),
inner -> {
DnsRecordSet recordSet = new DnsRecordSetImpl(inner.getName(), inner.getType(), self, inner);
switch (recordSet.recordType()) {
case A:
return Mono.just(new ARecordSetImpl(inner.getName(), self, inner));
case AAAA:
return Mono.just(new AaaaRecordSetImpl(inner.getName(), self, inner));
case CAA:
return Mono.just(new CaaRecordSetImpl(inner.getName(), self, inner));
case CNAME:
return Mono.just(new CNameRecordSetImpl(inner.getName(), self, inner));
case MX:
return Mono.just(new MXRecordSetImpl(inner.getName(), self, inner));
case NS:
return Mono.just(new NSRecordSetImpl(inner.getName(), self, inner));
case PTR:
return Mono.just(new PtrRecordSetImpl(inner.getName(), self, inner));
case SOA:
return Mono.just(new SoaRecordSetImpl(inner.getName(), self, inner));
case SRV:
return Mono.just(new SrvRecordSetImpl(inner.getName(), self, inner));
case TXT:
return Mono.just(new TxtRecordSetImpl(inner.getName(), self, inner));
default:
return Mono.just(recordSet);
}
});
return new PagedIterable<>(recordSets);
}
@Override
public DnsZoneImpl withPublicAccess() {
this.inner().withZoneType(ZoneType.PUBLIC);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess() {
this.inner().withZoneType(ZoneType.PRIVATE);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess(List<String> registrationVirtualNetworkIds, List<String> resolutionVirtualNetworkIds) {
this.withPrivateAccess();
this.inner().withRegistrationVirtualNetworks(new ArrayList<>());
this.inner().withResolutionVirtualNetworks(new ArrayList<>());
for (String rvnId : registrationVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().registrationVirtualNetworks().add(sb);
}
for (String rvnId : resolutionVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().resolutionVirtualNetworks().add(sb);
}
return this;
}
} | class DnsZoneImpl
extends GroupableResourceImpl<
DnsZone,
ZoneInner,
DnsZoneImpl,
DnsZoneManager>
implements
DnsZone,
DnsZone.Definition,
DnsZone.Update {
private ARecordSets aRecordSets;
private AaaaRecordSets aaaaRecordSets;
private CaaRecordSets caaRecordSets;
private CNameRecordSets cnameRecordSets;
private MXRecordSets mxRecordSets;
private NSRecordSets nsRecordSets;
private PtrRecordSets ptrRecordSets;
private SrvRecordSets srvRecordSets;
private TxtRecordSets txtRecordSets;
private DnsRecordSetsImpl recordSets;
private final ETagState eTagState = new ETagState();
DnsZoneImpl(String name, final ZoneInner innerModel, final DnsZoneManager manager) {
super(name, innerModel, manager);
this.recordSets = new DnsRecordSetsImpl(this);
initRecordSets();
if (isInCreateMode()) {
this.inner().withZoneType(ZoneType.PUBLIC);
}
}
@Override
public long maxNumberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().maxNumberOfRecordSets());
}
@Override
public long numberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().numberOfRecordSets());
}
@Override
public String eTag() {
return this.inner().etag();
}
@Override
public ZoneType accessType() {
return this.inner().zoneType();
}
@Override
public List<String> registrationVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().registrationVirtualNetworks() != null) {
for (SubResource sb : this.inner().registrationVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public List<String> resolutionVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().resolutionVirtualNetworks() != null) {
for (SubResource sb : this.inner().resolutionVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets() {
return this.listRecordSetsIntern(null, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix) {
return this.listRecordSetsIntern(recordSetNameSuffix, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(int pageSize) {
return this.listRecordSetsIntern(null, pageSize);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix, int pageSize) {
return this.listRecordSetsIntern(recordSetNameSuffix, pageSize);
}
@Override
public List<String> nameServers() {
if (this.inner() == null) {
return new ArrayList<>();
}
return this.inner().nameServers();
}
@Override
public ARecordSets aRecordSets() {
return this.aRecordSets;
}
@Override
public AaaaRecordSets aaaaRecordSets() {
return this.aaaaRecordSets;
}
@Override
public CaaRecordSets caaRecordSets() {
return this.caaRecordSets;
}
@Override
public CNameRecordSets cNameRecordSets() {
return this.cnameRecordSets;
}
@Override
public MXRecordSets mxRecordSets() {
return this.mxRecordSets;
}
@Override
public NSRecordSets nsRecordSets() {
return this.nsRecordSets;
}
@Override
public PtrRecordSets ptrRecordSets() {
return this.ptrRecordSets;
}
@Override
public SrvRecordSets srvRecordSets() {
return this.srvRecordSets;
}
@Override
public TxtRecordSets txtRecordSets() {
return this.txtRecordSets;
}
@Override
public SoaRecordSet getSoaRecordSet() {
RecordSetInner inner = this.manager().inner().recordSets().get(this.resourceGroupName(), this.name(), "@", RecordType.SOA);
if (inner == null) {
return null;
}
return new SoaRecordSetImpl(inner.getName(), this, inner);
}
@Override
public DnsRecordSetImpl defineARecordSet(String name) {
return recordSets.defineARecordSet(name);
}
@Override
public DnsRecordSetImpl defineAaaaRecordSet(String name) {
return recordSets.defineAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl defineCaaRecordSet(String name) {
return recordSets.defineCaaRecordSet(name);
}
@Override
public DnsZoneImpl withCNameRecordSet(String name, String alias) {
recordSets.withCNameRecordSet(name, alias);
return this;
}
@Override
public DnsRecordSetImpl defineCNameRecordSet(String name) {
return recordSets.defineCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl defineMXRecordSet(String name) {
return recordSets.defineMXRecordSet(name);
}
@Override
public DnsRecordSetImpl defineNSRecordSet(String name) {
return recordSets.defineNSRecordSet(name);
}
@Override
public DnsRecordSetImpl definePtrRecordSet(String name) {
return recordSets.definePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl defineSrvRecordSet(String name) {
return recordSets.defineSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl defineTxtRecordSet(String name) {
return recordSets.defineTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateARecordSet(String name) {
return recordSets.updateARecordSet(name);
}
@Override
public DnsRecordSetImpl updateAaaaRecordSet(String name) {
return recordSets.updateAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCaaRecordSet(String name) {
return recordSets.updateCaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateMXRecordSet(String name) {
return recordSets.updateMXRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCNameRecordSet(String name) {
return recordSets.updateCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl updateNSRecordSet(String name) {
return recordSets.updateNSRecordSet(name);
}
@Override
public DnsRecordSetImpl updatePtrRecordSet(String name) {
return recordSets.updatePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSrvRecordSet(String name) {
return recordSets.updateSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl updateTxtRecordSet(String name) {
return recordSets.updateTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSoaRecord() {
return recordSets.updateSoaRecordSet();
}
@Override
public DnsZoneImpl withoutARecordSet(String name) {
return this.withoutARecordSet(name, null);
}
@Override
public DnsZoneImpl withoutARecordSet(String name, String eTag) {
recordSets.withoutARecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name) {
return this.withoutAaaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name, String eTag) {
recordSets.withoutAaaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name) {
return this.withoutCaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name, String eTag) {
recordSets.withoutCaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name) {
return this.withoutCNameRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name, String eTag) {
recordSets.withoutCNameRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name) {
return this.withoutMXRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name, String eTag) {
recordSets.withoutMXRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name) {
return this.withoutNSRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name, String eTag) {
recordSets.withoutNSRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name) {
return this.withoutPtrRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name, String eTag) {
recordSets.withoutPtrRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name) {
return this.withoutSrvRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name, String eTag) {
recordSets.withoutSrvRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name) {
return this.withoutTxtRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name, String eTag) {
recordSets.withoutTxtRecordSet(name, eTag);
return this;
}
@Override
@Override
public DnsZoneImpl withETagCheck(String eTagValue) {
this.eTagState.withExplicitETagCheckOnUpdate(eTagValue);
return this;
}
@Override
public Mono<DnsZone> createResourceAsync() {
return Mono.just(this)
.flatMap(self -> self.manager().inner().zones().createOrUpdateAsync(self.resourceGroupName(),
self.name(), self.inner(), eTagState.ifMatchValueOnUpdate(self.inner().etag()), eTagState.ifNonMatchValueOnCreate()))
.map(innerToFluentMap(this))
.map(dnsZone -> {
this.eTagState.clear();
return dnsZone;
});
}
@Override
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) {
return Mono.just(true)
.map(ignored -> {
recordSets.clear();
return ignored;
})
.then();
}
@Override
public Mono<DnsZone> refreshAsync() {
return super.refreshAsync()
.map(dnsZone -> {
DnsZoneImpl impl = (DnsZoneImpl) dnsZone;
impl.initRecordSets();
return impl;
});
}
@Override
protected Mono<ZoneInner> getInnerAsync() {
return this.manager().inner().zones().getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
private void initRecordSets() {
this.aRecordSets = new ARecordSetsImpl(this);
this.aaaaRecordSets = new AaaaRecordSetsImpl(this);
this.caaRecordSets = new CaaRecordSetsImpl(this);
this.cnameRecordSets = new CNameRecordSetsImpl(this);
this.mxRecordSets = new MXRecordSetsImpl(this);
this.nsRecordSets = new NSRecordSetsImpl(this);
this.ptrRecordSets = new PtrRecordSetsImpl(this);
this.srvRecordSets = new SrvRecordSetsImpl(this);
this.txtRecordSets = new TxtRecordSetsImpl(this);
this.recordSets.clear();
}
private PagedIterable<DnsRecordSet> listRecordSetsIntern(String recordSetSuffix, Integer pageSize) {
final DnsZoneImpl self = this;
PagedFlux<DnsRecordSet> recordSets = PagedConverter.flatMapPage(
this.manager().inner().recordSets().listByDnsZoneAsync(this.resourceGroupName(), this.name(), pageSize, recordSetSuffix),
inner -> {
DnsRecordSet recordSet = new DnsRecordSetImpl(inner.getName(), inner.getType(), self, inner);
switch (recordSet.recordType()) {
case A:
return Mono.just(new ARecordSetImpl(inner.getName(), self, inner));
case AAAA:
return Mono.just(new AaaaRecordSetImpl(inner.getName(), self, inner));
case CAA:
return Mono.just(new CaaRecordSetImpl(inner.getName(), self, inner));
case CNAME:
return Mono.just(new CNameRecordSetImpl(inner.getName(), self, inner));
case MX:
return Mono.just(new MXRecordSetImpl(inner.getName(), self, inner));
case NS:
return Mono.just(new NSRecordSetImpl(inner.getName(), self, inner));
case PTR:
return Mono.just(new PtrRecordSetImpl(inner.getName(), self, inner));
case SOA:
return Mono.just(new SoaRecordSetImpl(inner.getName(), self, inner));
case SRV:
return Mono.just(new SrvRecordSetImpl(inner.getName(), self, inner));
case TXT:
return Mono.just(new TxtRecordSetImpl(inner.getName(), self, inner));
default:
return Mono.just(recordSet);
}
});
return new PagedIterable<>(recordSets);
}
@Override
public DnsZoneImpl withPublicAccess() {
this.inner().withZoneType(ZoneType.PUBLIC);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess() {
this.inner().withZoneType(ZoneType.PRIVATE);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess(List<String> registrationVirtualNetworkIds, List<String> resolutionVirtualNetworkIds) {
this.withPrivateAccess();
this.inner().withRegistrationVirtualNetworks(new ArrayList<>());
this.inner().withResolutionVirtualNetworks(new ArrayList<>());
for (String rvnId : registrationVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().registrationVirtualNetworks().add(sb);
}
for (String rvnId : resolutionVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().resolutionVirtualNetworks().add(sb);
}
return this;
}
} |
The problem is why does `If-None-Match: *` work on UPDATE? The condition it specifies is that there be no match, isn't it? Unless you can find a spec stating that this is acceptable, don't do it. The test passing might just be current service behavior, which might not work on another service. | public DnsZoneImpl withETagCheck() {
this.eTagState.withImplicitETagCheckOnCreate();
this.eTagState.withImplicitETagCheckOnUpdate();
return this;
} | this.eTagState.withImplicitETagCheckOnUpdate(); | public DnsZoneImpl withETagCheck() {
this.eTagState.withImplicitETagCheckOnCreateOrUpdate(this.isInCreateMode());
return this;
} | class DnsZoneImpl
extends GroupableResourceImpl<
DnsZone,
ZoneInner,
DnsZoneImpl,
DnsZoneManager>
implements
DnsZone,
DnsZone.Definition,
DnsZone.Update {
private ARecordSets aRecordSets;
private AaaaRecordSets aaaaRecordSets;
private CaaRecordSets caaRecordSets;
private CNameRecordSets cnameRecordSets;
private MXRecordSets mxRecordSets;
private NSRecordSets nsRecordSets;
private PtrRecordSets ptrRecordSets;
private SrvRecordSets srvRecordSets;
private TxtRecordSets txtRecordSets;
private DnsRecordSetsImpl recordSets;
private final ETagState eTagState = new ETagState();
DnsZoneImpl(String name, final ZoneInner innerModel, final DnsZoneManager manager) {
super(name, innerModel, manager);
this.recordSets = new DnsRecordSetsImpl(this);
initRecordSets();
if (isInCreateMode()) {
this.inner().withZoneType(ZoneType.PUBLIC);
}
}
@Override
public long maxNumberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().maxNumberOfRecordSets());
}
@Override
public long numberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().numberOfRecordSets());
}
@Override
public String eTag() {
return this.inner().etag();
}
@Override
public ZoneType accessType() {
return this.inner().zoneType();
}
@Override
public List<String> registrationVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().registrationVirtualNetworks() != null) {
for (SubResource sb : this.inner().registrationVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public List<String> resolutionVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().resolutionVirtualNetworks() != null) {
for (SubResource sb : this.inner().resolutionVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets() {
return this.listRecordSetsIntern(null, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix) {
return this.listRecordSetsIntern(recordSetNameSuffix, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(int pageSize) {
return this.listRecordSetsIntern(null, pageSize);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix, int pageSize) {
return this.listRecordSetsIntern(recordSetNameSuffix, pageSize);
}
@Override
public List<String> nameServers() {
if (this.inner() == null) {
return new ArrayList<>();
}
return this.inner().nameServers();
}
@Override
public ARecordSets aRecordSets() {
return this.aRecordSets;
}
@Override
public AaaaRecordSets aaaaRecordSets() {
return this.aaaaRecordSets;
}
@Override
public CaaRecordSets caaRecordSets() {
return this.caaRecordSets;
}
@Override
public CNameRecordSets cNameRecordSets() {
return this.cnameRecordSets;
}
@Override
public MXRecordSets mxRecordSets() {
return this.mxRecordSets;
}
@Override
public NSRecordSets nsRecordSets() {
return this.nsRecordSets;
}
@Override
public PtrRecordSets ptrRecordSets() {
return this.ptrRecordSets;
}
@Override
public SrvRecordSets srvRecordSets() {
return this.srvRecordSets;
}
@Override
public TxtRecordSets txtRecordSets() {
return this.txtRecordSets;
}
@Override
public SoaRecordSet getSoaRecordSet() {
RecordSetInner inner = this.manager().inner().recordSets().get(this.resourceGroupName(), this.name(), "@", RecordType.SOA);
if (inner == null) {
return null;
}
return new SoaRecordSetImpl(inner.getName(), this, inner);
}
@Override
public DnsRecordSetImpl defineARecordSet(String name) {
return recordSets.defineARecordSet(name);
}
@Override
public DnsRecordSetImpl defineAaaaRecordSet(String name) {
return recordSets.defineAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl defineCaaRecordSet(String name) {
return recordSets.defineCaaRecordSet(name);
}
@Override
public DnsZoneImpl withCNameRecordSet(String name, String alias) {
recordSets.withCNameRecordSet(name, alias);
return this;
}
@Override
public DnsRecordSetImpl defineCNameRecordSet(String name) {
return recordSets.defineCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl defineMXRecordSet(String name) {
return recordSets.defineMXRecordSet(name);
}
@Override
public DnsRecordSetImpl defineNSRecordSet(String name) {
return recordSets.defineNSRecordSet(name);
}
@Override
public DnsRecordSetImpl definePtrRecordSet(String name) {
return recordSets.definePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl defineSrvRecordSet(String name) {
return recordSets.defineSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl defineTxtRecordSet(String name) {
return recordSets.defineTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateARecordSet(String name) {
return recordSets.updateARecordSet(name);
}
@Override
public DnsRecordSetImpl updateAaaaRecordSet(String name) {
return recordSets.updateAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCaaRecordSet(String name) {
return recordSets.updateCaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateMXRecordSet(String name) {
return recordSets.updateMXRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCNameRecordSet(String name) {
return recordSets.updateCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl updateNSRecordSet(String name) {
return recordSets.updateNSRecordSet(name);
}
@Override
public DnsRecordSetImpl updatePtrRecordSet(String name) {
return recordSets.updatePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSrvRecordSet(String name) {
return recordSets.updateSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl updateTxtRecordSet(String name) {
return recordSets.updateTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSoaRecord() {
return recordSets.updateSoaRecordSet();
}
@Override
public DnsZoneImpl withoutARecordSet(String name) {
return this.withoutARecordSet(name, null);
}
@Override
public DnsZoneImpl withoutARecordSet(String name, String eTag) {
recordSets.withoutARecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name) {
return this.withoutAaaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name, String eTag) {
recordSets.withoutAaaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name) {
return this.withoutCaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name, String eTag) {
recordSets.withoutCaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name) {
return this.withoutCNameRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name, String eTag) {
recordSets.withoutCNameRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name) {
return this.withoutMXRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name, String eTag) {
recordSets.withoutMXRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name) {
return this.withoutNSRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name, String eTag) {
recordSets.withoutNSRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name) {
return this.withoutPtrRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name, String eTag) {
recordSets.withoutPtrRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name) {
return this.withoutSrvRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name, String eTag) {
recordSets.withoutSrvRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name) {
return this.withoutTxtRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name, String eTag) {
recordSets.withoutTxtRecordSet(name, eTag);
return this;
}
@Override
@Override
public DnsZoneImpl withETagCheck(String eTagValue) {
this.eTagState.withExplicitETagCheckOnUpdate(eTagValue);
return this;
}
@Override
public Mono<DnsZone> createResourceAsync() {
return Mono.just(this)
.flatMap(self -> self.manager().inner().zones().createOrUpdateAsync(self.resourceGroupName(),
self.name(), self.inner(), eTagState.ifMatchValueOnUpdate(self.inner().etag()), eTagState.ifNonMatchValueOnCreate()))
.map(innerToFluentMap(this))
.map(dnsZone -> {
this.eTagState.clear();
return dnsZone;
});
}
@Override
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) {
return Mono.just(true)
.map(ignored -> {
recordSets.clear();
return ignored;
})
.then();
}
@Override
public Mono<DnsZone> refreshAsync() {
return super.refreshAsync()
.map(dnsZone -> {
DnsZoneImpl impl = (DnsZoneImpl) dnsZone;
impl.initRecordSets();
return impl;
});
}
@Override
protected Mono<ZoneInner> getInnerAsync() {
return this.manager().inner().zones().getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
private void initRecordSets() {
this.aRecordSets = new ARecordSetsImpl(this);
this.aaaaRecordSets = new AaaaRecordSetsImpl(this);
this.caaRecordSets = new CaaRecordSetsImpl(this);
this.cnameRecordSets = new CNameRecordSetsImpl(this);
this.mxRecordSets = new MXRecordSetsImpl(this);
this.nsRecordSets = new NSRecordSetsImpl(this);
this.ptrRecordSets = new PtrRecordSetsImpl(this);
this.srvRecordSets = new SrvRecordSetsImpl(this);
this.txtRecordSets = new TxtRecordSetsImpl(this);
this.recordSets.clear();
}
private PagedIterable<DnsRecordSet> listRecordSetsIntern(String recordSetSuffix, Integer pageSize) {
final DnsZoneImpl self = this;
PagedFlux<DnsRecordSet> recordSets = PagedConverter.flatMapPage(
this.manager().inner().recordSets().listByDnsZoneAsync(this.resourceGroupName(), this.name(), pageSize, recordSetSuffix),
inner -> {
DnsRecordSet recordSet = new DnsRecordSetImpl(inner.getName(), inner.getType(), self, inner);
switch (recordSet.recordType()) {
case A:
return Mono.just(new ARecordSetImpl(inner.getName(), self, inner));
case AAAA:
return Mono.just(new AaaaRecordSetImpl(inner.getName(), self, inner));
case CAA:
return Mono.just(new CaaRecordSetImpl(inner.getName(), self, inner));
case CNAME:
return Mono.just(new CNameRecordSetImpl(inner.getName(), self, inner));
case MX:
return Mono.just(new MXRecordSetImpl(inner.getName(), self, inner));
case NS:
return Mono.just(new NSRecordSetImpl(inner.getName(), self, inner));
case PTR:
return Mono.just(new PtrRecordSetImpl(inner.getName(), self, inner));
case SOA:
return Mono.just(new SoaRecordSetImpl(inner.getName(), self, inner));
case SRV:
return Mono.just(new SrvRecordSetImpl(inner.getName(), self, inner));
case TXT:
return Mono.just(new TxtRecordSetImpl(inner.getName(), self, inner));
default:
return Mono.just(recordSet);
}
});
return new PagedIterable<>(recordSets);
}
@Override
public DnsZoneImpl withPublicAccess() {
this.inner().withZoneType(ZoneType.PUBLIC);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess() {
this.inner().withZoneType(ZoneType.PRIVATE);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess(List<String> registrationVirtualNetworkIds, List<String> resolutionVirtualNetworkIds) {
this.withPrivateAccess();
this.inner().withRegistrationVirtualNetworks(new ArrayList<>());
this.inner().withResolutionVirtualNetworks(new ArrayList<>());
for (String rvnId : registrationVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().registrationVirtualNetworks().add(sb);
}
for (String rvnId : resolutionVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().resolutionVirtualNetworks().add(sb);
}
return this;
}
} | class DnsZoneImpl
extends GroupableResourceImpl<
DnsZone,
ZoneInner,
DnsZoneImpl,
DnsZoneManager>
implements
DnsZone,
DnsZone.Definition,
DnsZone.Update {
private ARecordSets aRecordSets;
private AaaaRecordSets aaaaRecordSets;
private CaaRecordSets caaRecordSets;
private CNameRecordSets cnameRecordSets;
private MXRecordSets mxRecordSets;
private NSRecordSets nsRecordSets;
private PtrRecordSets ptrRecordSets;
private SrvRecordSets srvRecordSets;
private TxtRecordSets txtRecordSets;
private DnsRecordSetsImpl recordSets;
private final ETagState eTagState = new ETagState();
DnsZoneImpl(String name, final ZoneInner innerModel, final DnsZoneManager manager) {
super(name, innerModel, manager);
this.recordSets = new DnsRecordSetsImpl(this);
initRecordSets();
if (isInCreateMode()) {
this.inner().withZoneType(ZoneType.PUBLIC);
}
}
@Override
public long maxNumberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().maxNumberOfRecordSets());
}
@Override
public long numberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().numberOfRecordSets());
}
@Override
public String eTag() {
return this.inner().etag();
}
@Override
public ZoneType accessType() {
return this.inner().zoneType();
}
@Override
public List<String> registrationVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().registrationVirtualNetworks() != null) {
for (SubResource sb : this.inner().registrationVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public List<String> resolutionVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().resolutionVirtualNetworks() != null) {
for (SubResource sb : this.inner().resolutionVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets() {
return this.listRecordSetsIntern(null, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix) {
return this.listRecordSetsIntern(recordSetNameSuffix, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(int pageSize) {
return this.listRecordSetsIntern(null, pageSize);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix, int pageSize) {
return this.listRecordSetsIntern(recordSetNameSuffix, pageSize);
}
@Override
public List<String> nameServers() {
if (this.inner() == null) {
return new ArrayList<>();
}
return this.inner().nameServers();
}
@Override
public ARecordSets aRecordSets() {
return this.aRecordSets;
}
@Override
public AaaaRecordSets aaaaRecordSets() {
return this.aaaaRecordSets;
}
@Override
public CaaRecordSets caaRecordSets() {
return this.caaRecordSets;
}
@Override
public CNameRecordSets cNameRecordSets() {
return this.cnameRecordSets;
}
@Override
public MXRecordSets mxRecordSets() {
return this.mxRecordSets;
}
@Override
public NSRecordSets nsRecordSets() {
return this.nsRecordSets;
}
@Override
public PtrRecordSets ptrRecordSets() {
return this.ptrRecordSets;
}
@Override
public SrvRecordSets srvRecordSets() {
return this.srvRecordSets;
}
@Override
public TxtRecordSets txtRecordSets() {
return this.txtRecordSets;
}
@Override
public SoaRecordSet getSoaRecordSet() {
RecordSetInner inner = this.manager().inner().recordSets().get(this.resourceGroupName(), this.name(), "@", RecordType.SOA);
if (inner == null) {
return null;
}
return new SoaRecordSetImpl(inner.getName(), this, inner);
}
@Override
public DnsRecordSetImpl defineARecordSet(String name) {
return recordSets.defineARecordSet(name);
}
@Override
public DnsRecordSetImpl defineAaaaRecordSet(String name) {
return recordSets.defineAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl defineCaaRecordSet(String name) {
return recordSets.defineCaaRecordSet(name);
}
@Override
public DnsZoneImpl withCNameRecordSet(String name, String alias) {
recordSets.withCNameRecordSet(name, alias);
return this;
}
@Override
public DnsRecordSetImpl defineCNameRecordSet(String name) {
return recordSets.defineCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl defineMXRecordSet(String name) {
return recordSets.defineMXRecordSet(name);
}
@Override
public DnsRecordSetImpl defineNSRecordSet(String name) {
return recordSets.defineNSRecordSet(name);
}
@Override
public DnsRecordSetImpl definePtrRecordSet(String name) {
return recordSets.definePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl defineSrvRecordSet(String name) {
return recordSets.defineSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl defineTxtRecordSet(String name) {
return recordSets.defineTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateARecordSet(String name) {
return recordSets.updateARecordSet(name);
}
@Override
public DnsRecordSetImpl updateAaaaRecordSet(String name) {
return recordSets.updateAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCaaRecordSet(String name) {
return recordSets.updateCaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateMXRecordSet(String name) {
return recordSets.updateMXRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCNameRecordSet(String name) {
return recordSets.updateCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl updateNSRecordSet(String name) {
return recordSets.updateNSRecordSet(name);
}
@Override
public DnsRecordSetImpl updatePtrRecordSet(String name) {
return recordSets.updatePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSrvRecordSet(String name) {
return recordSets.updateSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl updateTxtRecordSet(String name) {
return recordSets.updateTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSoaRecord() {
return recordSets.updateSoaRecordSet();
}
@Override
public DnsZoneImpl withoutARecordSet(String name) {
return this.withoutARecordSet(name, null);
}
@Override
public DnsZoneImpl withoutARecordSet(String name, String eTag) {
recordSets.withoutARecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name) {
return this.withoutAaaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name, String eTag) {
recordSets.withoutAaaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name) {
return this.withoutCaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name, String eTag) {
recordSets.withoutCaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name) {
return this.withoutCNameRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name, String eTag) {
recordSets.withoutCNameRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name) {
return this.withoutMXRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name, String eTag) {
recordSets.withoutMXRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name) {
return this.withoutNSRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name, String eTag) {
recordSets.withoutNSRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name) {
return this.withoutPtrRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name, String eTag) {
recordSets.withoutPtrRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name) {
return this.withoutSrvRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name, String eTag) {
recordSets.withoutSrvRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name) {
return this.withoutTxtRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name, String eTag) {
recordSets.withoutTxtRecordSet(name, eTag);
return this;
}
@Override
@Override
public DnsZoneImpl withETagCheck(String eTagValue) {
this.eTagState.withExplicitETagCheckOnUpdate(eTagValue);
return this;
}
@Override
public Mono<DnsZone> createResourceAsync() {
return Mono.just(this)
.flatMap(self -> self.manager().inner().zones().createOrUpdateAsync(self.resourceGroupName(),
self.name(), self.inner(), eTagState.ifMatchValueOnUpdate(self.inner().etag()), eTagState.ifNonMatchValueOnCreate()))
.map(innerToFluentMap(this))
.map(dnsZone -> {
this.eTagState.clear();
return dnsZone;
});
}
@Override
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) {
return Mono.just(true)
.map(ignored -> {
recordSets.clear();
return ignored;
})
.then();
}
@Override
public Mono<DnsZone> refreshAsync() {
return super.refreshAsync()
.map(dnsZone -> {
DnsZoneImpl impl = (DnsZoneImpl) dnsZone;
impl.initRecordSets();
return impl;
});
}
@Override
protected Mono<ZoneInner> getInnerAsync() {
return this.manager().inner().zones().getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
private void initRecordSets() {
this.aRecordSets = new ARecordSetsImpl(this);
this.aaaaRecordSets = new AaaaRecordSetsImpl(this);
this.caaRecordSets = new CaaRecordSetsImpl(this);
this.cnameRecordSets = new CNameRecordSetsImpl(this);
this.mxRecordSets = new MXRecordSetsImpl(this);
this.nsRecordSets = new NSRecordSetsImpl(this);
this.ptrRecordSets = new PtrRecordSetsImpl(this);
this.srvRecordSets = new SrvRecordSetsImpl(this);
this.txtRecordSets = new TxtRecordSetsImpl(this);
this.recordSets.clear();
}
/**
 * Lists the record sets in this zone, wrapping each inner record set in the
 * strongly-typed implementation matching its record type.
 *
 * @param recordSetSuffix when not null, a name suffix the service uses to filter record sets — TODO confirm filter semantics against the REST API
 * @param pageSize when not null, the maximum number of record sets per page
 * @return the record sets, lazily paged
 */
private PagedIterable<DnsRecordSet> listRecordSetsIntern(String recordSetSuffix, Integer pageSize) {
    final DnsZoneImpl self = this;
    PagedFlux<DnsRecordSet> recordSets = PagedConverter.flatMapPage(
        this.manager().inner().recordSets().listByDnsZoneAsync(this.resourceGroupName(), this.name(), pageSize, recordSetSuffix),
        inner -> {
            // Generic wrapper; only returned for record types not matched below.
            DnsRecordSet recordSet = new DnsRecordSetImpl(inner.getName(), inner.getType(), self, inner);
            switch (recordSet.recordType()) {
                case A:
                    return Mono.just(new ARecordSetImpl(inner.getName(), self, inner));
                case AAAA:
                    return Mono.just(new AaaaRecordSetImpl(inner.getName(), self, inner));
                case CAA:
                    return Mono.just(new CaaRecordSetImpl(inner.getName(), self, inner));
                case CNAME:
                    return Mono.just(new CNameRecordSetImpl(inner.getName(), self, inner));
                case MX:
                    return Mono.just(new MXRecordSetImpl(inner.getName(), self, inner));
                case NS:
                    return Mono.just(new NSRecordSetImpl(inner.getName(), self, inner));
                case PTR:
                    return Mono.just(new PtrRecordSetImpl(inner.getName(), self, inner));
                case SOA:
                    return Mono.just(new SoaRecordSetImpl(inner.getName(), self, inner));
                case SRV:
                    return Mono.just(new SrvRecordSetImpl(inner.getName(), self, inner));
                case TXT:
                    return Mono.just(new TxtRecordSetImpl(inner.getName(), self, inner));
                default:
                    return Mono.just(recordSet);
            }
        });
    return new PagedIterable<>(recordSets);
}
@Override
public DnsZoneImpl withPublicAccess() {
    // Marks the zone public and clears both virtual-network lists
    // (those lists are only populated for private zones — see withPrivateAccess).
    this.inner().withZoneType(ZoneType.PUBLIC);
    this.inner().withRegistrationVirtualNetworks(null);
    this.inner().withResolutionVirtualNetworks(null);
    return this;
}
@Override
public DnsZoneImpl withPrivateAccess() {
    // Marks the zone private and resets both virtual-network lists so callers
    // start from a clean state; the List-accepting overload registers networks.
    this.inner().withZoneType(ZoneType.PRIVATE);
    this.inner().withRegistrationVirtualNetworks(null);
    this.inner().withResolutionVirtualNetworks(null);
    return this;
}
/**
 * Marks the zone private and registers the given virtual networks for
 * registration and resolution, replacing any previous configuration.
 *
 * @param registrationVirtualNetworkIds IDs of virtual networks allowed to register records
 * @param resolutionVirtualNetworkIds IDs of virtual networks allowed to resolve records
 * @return this zone update for chaining
 */
@Override
public DnsZoneImpl withPrivateAccess(List<String> registrationVirtualNetworkIds, List<String> resolutionVirtualNetworkIds) {
    // Start from a clean private-access configuration, then attach references.
    this.withPrivateAccess();
    this.inner().withRegistrationVirtualNetworks(new ArrayList<>());
    this.inner().withResolutionVirtualNetworks(new ArrayList<>());
    for (String registrationNetworkId : registrationVirtualNetworkIds) {
        SubResource reference = new SubResource();
        reference.setId(registrationNetworkId);
        this.inner().registrationVirtualNetworks().add(reference);
    }
    for (String resolutionNetworkId : resolutionVirtualNetworkIds) {
        SubResource reference = new SubResource();
        reference.setId(resolutionNetworkId);
        this.inner().resolutionVirtualNetworks().add(reference);
    }
    return this;
}
} |
Agreed — I've just committed changes for this. Now it will only set one of them, by verifying inner.id(). | public DnsZoneImpl withETagCheck() {
// Sets both implicit flags; createResourceAsync() reads them via
// ifMatchValueOnUpdate() / ifNonMatchValueOnCreate().
// NOTE(review): only one of the two should apply per operation — confirm
// (see the revised version that checks the current mode).
this.eTagState.withImplicitETagCheckOnCreate();
this.eTagState.withImplicitETagCheckOnUpdate();
return this;
} | this.eTagState.withImplicitETagCheckOnUpdate(); | public DnsZoneImpl withETagCheck() {
// Single call chooses create-vs-update ETag semantics from the current mode,
// so only one of the two implicit checks is ever set.
this.eTagState.withImplicitETagCheckOnCreateOrUpdate(this.isInCreateMode());
return this;
} | class DnsZoneImpl
extends GroupableResourceImpl<
DnsZone,
ZoneInner,
DnsZoneImpl,
DnsZoneManager>
implements
DnsZone,
DnsZone.Definition,
DnsZone.Update {
private ARecordSets aRecordSets;
private AaaaRecordSets aaaaRecordSets;
private CaaRecordSets caaRecordSets;
private CNameRecordSets cnameRecordSets;
private MXRecordSets mxRecordSets;
private NSRecordSets nsRecordSets;
private PtrRecordSets ptrRecordSets;
private SrvRecordSets srvRecordSets;
private TxtRecordSets txtRecordSets;
private DnsRecordSetsImpl recordSets;
private final ETagState eTagState = new ETagState();
DnsZoneImpl(String name, final ZoneInner innerModel, final DnsZoneManager manager) {
super(name, innerModel, manager);
this.recordSets = new DnsRecordSetsImpl(this);
initRecordSets();
if (isInCreateMode()) {
this.inner().withZoneType(ZoneType.PUBLIC);
}
}
@Override
public long maxNumberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().maxNumberOfRecordSets());
}
@Override
public long numberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().numberOfRecordSets());
}
@Override
public String eTag() {
return this.inner().etag();
}
@Override
public ZoneType accessType() {
return this.inner().zoneType();
}
@Override
public List<String> registrationVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().registrationVirtualNetworks() != null) {
for (SubResource sb : this.inner().registrationVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public List<String> resolutionVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().resolutionVirtualNetworks() != null) {
for (SubResource sb : this.inner().resolutionVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets() {
return this.listRecordSetsIntern(null, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix) {
return this.listRecordSetsIntern(recordSetNameSuffix, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(int pageSize) {
return this.listRecordSetsIntern(null, pageSize);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix, int pageSize) {
return this.listRecordSetsIntern(recordSetNameSuffix, pageSize);
}
@Override
public List<String> nameServers() {
if (this.inner() == null) {
return new ArrayList<>();
}
return this.inner().nameServers();
}
@Override
public ARecordSets aRecordSets() {
return this.aRecordSets;
}
@Override
public AaaaRecordSets aaaaRecordSets() {
return this.aaaaRecordSets;
}
@Override
public CaaRecordSets caaRecordSets() {
return this.caaRecordSets;
}
@Override
public CNameRecordSets cNameRecordSets() {
return this.cnameRecordSets;
}
@Override
public MXRecordSets mxRecordSets() {
return this.mxRecordSets;
}
@Override
public NSRecordSets nsRecordSets() {
return this.nsRecordSets;
}
@Override
public PtrRecordSets ptrRecordSets() {
return this.ptrRecordSets;
}
@Override
public SrvRecordSets srvRecordSets() {
return this.srvRecordSets;
}
@Override
public TxtRecordSets txtRecordSets() {
return this.txtRecordSets;
}
@Override
public SoaRecordSet getSoaRecordSet() {
RecordSetInner inner = this.manager().inner().recordSets().get(this.resourceGroupName(), this.name(), "@", RecordType.SOA);
if (inner == null) {
return null;
}
return new SoaRecordSetImpl(inner.getName(), this, inner);
}
@Override
public DnsRecordSetImpl defineARecordSet(String name) {
return recordSets.defineARecordSet(name);
}
@Override
public DnsRecordSetImpl defineAaaaRecordSet(String name) {
return recordSets.defineAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl defineCaaRecordSet(String name) {
return recordSets.defineCaaRecordSet(name);
}
@Override
public DnsZoneImpl withCNameRecordSet(String name, String alias) {
recordSets.withCNameRecordSet(name, alias);
return this;
}
@Override
public DnsRecordSetImpl defineCNameRecordSet(String name) {
return recordSets.defineCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl defineMXRecordSet(String name) {
return recordSets.defineMXRecordSet(name);
}
@Override
public DnsRecordSetImpl defineNSRecordSet(String name) {
return recordSets.defineNSRecordSet(name);
}
@Override
public DnsRecordSetImpl definePtrRecordSet(String name) {
return recordSets.definePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl defineSrvRecordSet(String name) {
return recordSets.defineSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl defineTxtRecordSet(String name) {
return recordSets.defineTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateARecordSet(String name) {
return recordSets.updateARecordSet(name);
}
@Override
public DnsRecordSetImpl updateAaaaRecordSet(String name) {
return recordSets.updateAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCaaRecordSet(String name) {
return recordSets.updateCaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateMXRecordSet(String name) {
return recordSets.updateMXRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCNameRecordSet(String name) {
return recordSets.updateCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl updateNSRecordSet(String name) {
return recordSets.updateNSRecordSet(name);
}
@Override
public DnsRecordSetImpl updatePtrRecordSet(String name) {
return recordSets.updatePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSrvRecordSet(String name) {
return recordSets.updateSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl updateTxtRecordSet(String name) {
return recordSets.updateTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSoaRecord() {
return recordSets.updateSoaRecordSet();
}
@Override
public DnsZoneImpl withoutARecordSet(String name) {
return this.withoutARecordSet(name, null);
}
@Override
public DnsZoneImpl withoutARecordSet(String name, String eTag) {
recordSets.withoutARecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name) {
return this.withoutAaaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name, String eTag) {
recordSets.withoutAaaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name) {
return this.withoutCaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name, String eTag) {
recordSets.withoutCaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name) {
return this.withoutCNameRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name, String eTag) {
recordSets.withoutCNameRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name) {
return this.withoutMXRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name, String eTag) {
recordSets.withoutMXRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name) {
return this.withoutNSRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name, String eTag) {
recordSets.withoutNSRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name) {
return this.withoutPtrRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name, String eTag) {
recordSets.withoutPtrRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name) {
return this.withoutSrvRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name, String eTag) {
recordSets.withoutSrvRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name) {
return this.withoutTxtRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name, String eTag) {
recordSets.withoutTxtRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withETagCheck(String eTagValue) {
    // Fix: the annotation was duplicated (@Override twice), which is a compile
    // error — @Override is not a repeatable annotation.
    // Require the zone's ETag to match eTagValue when the update executes
    // (optimistic concurrency; a mismatch fails the request).
    this.eTagState.withExplicitETagCheckOnUpdate(eTagValue);
    return this;
}
@Override
public Mono<DnsZone> createResourceAsync() {
return Mono.just(this)
.flatMap(self -> self.manager().inner().zones().createOrUpdateAsync(self.resourceGroupName(),
self.name(), self.inner(), eTagState.ifMatchValueOnUpdate(self.inner().etag()), eTagState.ifNonMatchValueOnCreate()))
.map(innerToFluentMap(this))
.map(dnsZone -> {
this.eTagState.clear();
return dnsZone;
});
}
@Override
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) {
return Mono.just(true)
.map(ignored -> {
recordSets.clear();
return ignored;
})
.then();
}
@Override
public Mono<DnsZone> refreshAsync() {
return super.refreshAsync()
.map(dnsZone -> {
DnsZoneImpl impl = (DnsZoneImpl) dnsZone;
impl.initRecordSets();
return impl;
});
}
@Override
protected Mono<ZoneInner> getInnerAsync() {
return this.manager().inner().zones().getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
private void initRecordSets() {
this.aRecordSets = new ARecordSetsImpl(this);
this.aaaaRecordSets = new AaaaRecordSetsImpl(this);
this.caaRecordSets = new CaaRecordSetsImpl(this);
this.cnameRecordSets = new CNameRecordSetsImpl(this);
this.mxRecordSets = new MXRecordSetsImpl(this);
this.nsRecordSets = new NSRecordSetsImpl(this);
this.ptrRecordSets = new PtrRecordSetsImpl(this);
this.srvRecordSets = new SrvRecordSetsImpl(this);
this.txtRecordSets = new TxtRecordSetsImpl(this);
this.recordSets.clear();
}
private PagedIterable<DnsRecordSet> listRecordSetsIntern(String recordSetSuffix, Integer pageSize) {
final DnsZoneImpl self = this;
PagedFlux<DnsRecordSet> recordSets = PagedConverter.flatMapPage(
this.manager().inner().recordSets().listByDnsZoneAsync(this.resourceGroupName(), this.name(), pageSize, recordSetSuffix),
inner -> {
DnsRecordSet recordSet = new DnsRecordSetImpl(inner.getName(), inner.getType(), self, inner);
switch (recordSet.recordType()) {
case A:
return Mono.just(new ARecordSetImpl(inner.getName(), self, inner));
case AAAA:
return Mono.just(new AaaaRecordSetImpl(inner.getName(), self, inner));
case CAA:
return Mono.just(new CaaRecordSetImpl(inner.getName(), self, inner));
case CNAME:
return Mono.just(new CNameRecordSetImpl(inner.getName(), self, inner));
case MX:
return Mono.just(new MXRecordSetImpl(inner.getName(), self, inner));
case NS:
return Mono.just(new NSRecordSetImpl(inner.getName(), self, inner));
case PTR:
return Mono.just(new PtrRecordSetImpl(inner.getName(), self, inner));
case SOA:
return Mono.just(new SoaRecordSetImpl(inner.getName(), self, inner));
case SRV:
return Mono.just(new SrvRecordSetImpl(inner.getName(), self, inner));
case TXT:
return Mono.just(new TxtRecordSetImpl(inner.getName(), self, inner));
default:
return Mono.just(recordSet);
}
});
return new PagedIterable<>(recordSets);
}
@Override
public DnsZoneImpl withPublicAccess() {
this.inner().withZoneType(ZoneType.PUBLIC);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess() {
this.inner().withZoneType(ZoneType.PRIVATE);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess(List<String> registrationVirtualNetworkIds, List<String> resolutionVirtualNetworkIds) {
this.withPrivateAccess();
this.inner().withRegistrationVirtualNetworks(new ArrayList<>());
this.inner().withResolutionVirtualNetworks(new ArrayList<>());
for (String rvnId : registrationVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().registrationVirtualNetworks().add(sb);
}
for (String rvnId : resolutionVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().resolutionVirtualNetworks().add(sb);
}
return this;
}
} | class DnsZoneImpl
extends GroupableResourceImpl<
DnsZone,
ZoneInner,
DnsZoneImpl,
DnsZoneManager>
implements
DnsZone,
DnsZone.Definition,
DnsZone.Update {
private ARecordSets aRecordSets;
private AaaaRecordSets aaaaRecordSets;
private CaaRecordSets caaRecordSets;
private CNameRecordSets cnameRecordSets;
private MXRecordSets mxRecordSets;
private NSRecordSets nsRecordSets;
private PtrRecordSets ptrRecordSets;
private SrvRecordSets srvRecordSets;
private TxtRecordSets txtRecordSets;
private DnsRecordSetsImpl recordSets;
private final ETagState eTagState = new ETagState();
DnsZoneImpl(String name, final ZoneInner innerModel, final DnsZoneManager manager) {
super(name, innerModel, manager);
this.recordSets = new DnsRecordSetsImpl(this);
initRecordSets();
if (isInCreateMode()) {
this.inner().withZoneType(ZoneType.PUBLIC);
}
}
@Override
public long maxNumberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().maxNumberOfRecordSets());
}
@Override
public long numberOfRecordSets() {
return Utils.toPrimitiveLong(this.inner().numberOfRecordSets());
}
@Override
public String eTag() {
return this.inner().etag();
}
@Override
public ZoneType accessType() {
return this.inner().zoneType();
}
@Override
public List<String> registrationVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().registrationVirtualNetworks() != null) {
for (SubResource sb : this.inner().registrationVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public List<String> resolutionVirtualNetworkIds() {
List<String> list = new ArrayList<>();
if (this.inner().resolutionVirtualNetworks() != null) {
for (SubResource sb : this.inner().resolutionVirtualNetworks()) {
list.add(sb.getId());
}
}
return list;
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets() {
return this.listRecordSetsIntern(null, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix) {
return this.listRecordSetsIntern(recordSetNameSuffix, null);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(int pageSize) {
return this.listRecordSetsIntern(null, pageSize);
}
@Override
public PagedIterable<DnsRecordSet> listRecordSets(String recordSetNameSuffix, int pageSize) {
return this.listRecordSetsIntern(recordSetNameSuffix, pageSize);
}
@Override
public List<String> nameServers() {
if (this.inner() == null) {
return new ArrayList<>();
}
return this.inner().nameServers();
}
@Override
public ARecordSets aRecordSets() {
return this.aRecordSets;
}
@Override
public AaaaRecordSets aaaaRecordSets() {
return this.aaaaRecordSets;
}
@Override
public CaaRecordSets caaRecordSets() {
return this.caaRecordSets;
}
@Override
public CNameRecordSets cNameRecordSets() {
return this.cnameRecordSets;
}
@Override
public MXRecordSets mxRecordSets() {
return this.mxRecordSets;
}
@Override
public NSRecordSets nsRecordSets() {
return this.nsRecordSets;
}
@Override
public PtrRecordSets ptrRecordSets() {
return this.ptrRecordSets;
}
@Override
public SrvRecordSets srvRecordSets() {
return this.srvRecordSets;
}
@Override
public TxtRecordSets txtRecordSets() {
return this.txtRecordSets;
}
@Override
public SoaRecordSet getSoaRecordSet() {
RecordSetInner inner = this.manager().inner().recordSets().get(this.resourceGroupName(), this.name(), "@", RecordType.SOA);
if (inner == null) {
return null;
}
return new SoaRecordSetImpl(inner.getName(), this, inner);
}
@Override
public DnsRecordSetImpl defineARecordSet(String name) {
return recordSets.defineARecordSet(name);
}
@Override
public DnsRecordSetImpl defineAaaaRecordSet(String name) {
return recordSets.defineAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl defineCaaRecordSet(String name) {
return recordSets.defineCaaRecordSet(name);
}
@Override
public DnsZoneImpl withCNameRecordSet(String name, String alias) {
recordSets.withCNameRecordSet(name, alias);
return this;
}
@Override
public DnsRecordSetImpl defineCNameRecordSet(String name) {
return recordSets.defineCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl defineMXRecordSet(String name) {
return recordSets.defineMXRecordSet(name);
}
@Override
public DnsRecordSetImpl defineNSRecordSet(String name) {
return recordSets.defineNSRecordSet(name);
}
@Override
public DnsRecordSetImpl definePtrRecordSet(String name) {
return recordSets.definePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl defineSrvRecordSet(String name) {
return recordSets.defineSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl defineTxtRecordSet(String name) {
return recordSets.defineTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateARecordSet(String name) {
return recordSets.updateARecordSet(name);
}
@Override
public DnsRecordSetImpl updateAaaaRecordSet(String name) {
return recordSets.updateAaaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCaaRecordSet(String name) {
return recordSets.updateCaaRecordSet(name);
}
@Override
public DnsRecordSetImpl updateMXRecordSet(String name) {
return recordSets.updateMXRecordSet(name);
}
@Override
public DnsRecordSetImpl updateCNameRecordSet(String name) {
return recordSets.updateCNameRecordSet(name);
}
@Override
public DnsRecordSetImpl updateNSRecordSet(String name) {
return recordSets.updateNSRecordSet(name);
}
@Override
public DnsRecordSetImpl updatePtrRecordSet(String name) {
return recordSets.updatePtrRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSrvRecordSet(String name) {
return recordSets.updateSrvRecordSet(name);
}
@Override
public DnsRecordSetImpl updateTxtRecordSet(String name) {
return recordSets.updateTxtRecordSet(name);
}
@Override
public DnsRecordSetImpl updateSoaRecord() {
return recordSets.updateSoaRecordSet();
}
@Override
public DnsZoneImpl withoutARecordSet(String name) {
return this.withoutARecordSet(name, null);
}
@Override
public DnsZoneImpl withoutARecordSet(String name, String eTag) {
recordSets.withoutARecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name) {
return this.withoutAaaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutAaaaRecordSet(String name, String eTag) {
recordSets.withoutAaaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name) {
return this.withoutCaaRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCaaRecordSet(String name, String eTag) {
recordSets.withoutCaaRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name) {
return this.withoutCNameRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutCNameRecordSet(String name, String eTag) {
recordSets.withoutCNameRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name) {
return this.withoutMXRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutMXRecordSet(String name, String eTag) {
recordSets.withoutMXRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name) {
return this.withoutNSRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutNSRecordSet(String name, String eTag) {
recordSets.withoutNSRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name) {
return this.withoutPtrRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutPtrRecordSet(String name, String eTag) {
recordSets.withoutPtrRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name) {
return this.withoutSrvRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutSrvRecordSet(String name, String eTag) {
recordSets.withoutSrvRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name) {
return this.withoutTxtRecordSet(name, null);
}
@Override
public DnsZoneImpl withoutTxtRecordSet(String name, String eTag) {
recordSets.withoutTxtRecordSet(name, eTag);
return this;
}
@Override
public DnsZoneImpl withETagCheck(String eTagValue) {
    // Fix: the annotation was duplicated (@Override twice), which is a compile
    // error — @Override is not a repeatable annotation.
    // Require the zone's ETag to match eTagValue when the update executes
    // (optimistic concurrency; a mismatch fails the request).
    this.eTagState.withExplicitETagCheckOnUpdate(eTagValue);
    return this;
}
@Override
public Mono<DnsZone> createResourceAsync() {
return Mono.just(this)
.flatMap(self -> self.manager().inner().zones().createOrUpdateAsync(self.resourceGroupName(),
self.name(), self.inner(), eTagState.ifMatchValueOnUpdate(self.inner().etag()), eTagState.ifNonMatchValueOnCreate()))
.map(innerToFluentMap(this))
.map(dnsZone -> {
this.eTagState.clear();
return dnsZone;
});
}
@Override
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) {
return Mono.just(true)
.map(ignored -> {
recordSets.clear();
return ignored;
})
.then();
}
@Override
public Mono<DnsZone> refreshAsync() {
return super.refreshAsync()
.map(dnsZone -> {
DnsZoneImpl impl = (DnsZoneImpl) dnsZone;
impl.initRecordSets();
return impl;
});
}
@Override
protected Mono<ZoneInner> getInnerAsync() {
return this.manager().inner().zones().getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
private void initRecordSets() {
this.aRecordSets = new ARecordSetsImpl(this);
this.aaaaRecordSets = new AaaaRecordSetsImpl(this);
this.caaRecordSets = new CaaRecordSetsImpl(this);
this.cnameRecordSets = new CNameRecordSetsImpl(this);
this.mxRecordSets = new MXRecordSetsImpl(this);
this.nsRecordSets = new NSRecordSetsImpl(this);
this.ptrRecordSets = new PtrRecordSetsImpl(this);
this.srvRecordSets = new SrvRecordSetsImpl(this);
this.txtRecordSets = new TxtRecordSetsImpl(this);
this.recordSets.clear();
}
private PagedIterable<DnsRecordSet> listRecordSetsIntern(String recordSetSuffix, Integer pageSize) {
final DnsZoneImpl self = this;
PagedFlux<DnsRecordSet> recordSets = PagedConverter.flatMapPage(
this.manager().inner().recordSets().listByDnsZoneAsync(this.resourceGroupName(), this.name(), pageSize, recordSetSuffix),
inner -> {
DnsRecordSet recordSet = new DnsRecordSetImpl(inner.getName(), inner.getType(), self, inner);
switch (recordSet.recordType()) {
case A:
return Mono.just(new ARecordSetImpl(inner.getName(), self, inner));
case AAAA:
return Mono.just(new AaaaRecordSetImpl(inner.getName(), self, inner));
case CAA:
return Mono.just(new CaaRecordSetImpl(inner.getName(), self, inner));
case CNAME:
return Mono.just(new CNameRecordSetImpl(inner.getName(), self, inner));
case MX:
return Mono.just(new MXRecordSetImpl(inner.getName(), self, inner));
case NS:
return Mono.just(new NSRecordSetImpl(inner.getName(), self, inner));
case PTR:
return Mono.just(new PtrRecordSetImpl(inner.getName(), self, inner));
case SOA:
return Mono.just(new SoaRecordSetImpl(inner.getName(), self, inner));
case SRV:
return Mono.just(new SrvRecordSetImpl(inner.getName(), self, inner));
case TXT:
return Mono.just(new TxtRecordSetImpl(inner.getName(), self, inner));
default:
return Mono.just(recordSet);
}
});
return new PagedIterable<>(recordSets);
}
@Override
public DnsZoneImpl withPublicAccess() {
this.inner().withZoneType(ZoneType.PUBLIC);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess() {
this.inner().withZoneType(ZoneType.PRIVATE);
this.inner().withRegistrationVirtualNetworks(null);
this.inner().withResolutionVirtualNetworks(null);
return this;
}
@Override
public DnsZoneImpl withPrivateAccess(List<String> registrationVirtualNetworkIds, List<String> resolutionVirtualNetworkIds) {
this.withPrivateAccess();
this.inner().withRegistrationVirtualNetworks(new ArrayList<>());
this.inner().withResolutionVirtualNetworks(new ArrayList<>());
for (String rvnId : registrationVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().registrationVirtualNetworks().add(sb);
}
for (String rvnId : resolutionVirtualNetworkIds) {
SubResource sb = new SubResource();
sb.setId(rvnId);
this.inner().resolutionVirtualNetworks().add(sb);
}
return this;
}
} |
// Review note addressed: every positional segment lookup is now length-guarded,
// e.g. this.subscriptionId = splits.length > 1 ? splits[1] : null, so short
// (partial) IDs no longer throw ArrayIndexOutOfBoundsException.
/**
 * Parses an ARM resource ID into its components. A null ID yields all-null
 * fields; a malformed ID throws InvalidParameterException.
 *
 * @param id the resource ID string, e.g.
 *     /subscriptions/{s}/resourceGroups/{g}/providers/{ns}/{type}/{name}
 */
private ResourceId(final String id) {
    if (id == null) {
        this.subscriptionId = null;
        this.resourceGroupName = null;
        this.name = null;
        this.providerNamespace = null;
        this.resourceType = null;
        this.id = null;
        this.parentId = null;
        return;
    } else {
        // Trim a leading '/' so the split yields alternating key/value segments.
        String[] splits = (id.startsWith("/")) ? id.substring(1).split("/") : id.split("/");
        // A valid ID always has an even number of segments (key/value pairs).
        if (splits.length % 2 == 1) {
            throw new InvalidParameterException(badIdErrorText(id));
        }
        this.id = id;
        if (splits.length < 2) {
            throw new InvalidParameterException(badIdErrorText(id));
        } else {
            // The resource's own type/name pair is always the last two segments.
            this.name = splits[splits.length - 1];
            this.resourceType = splits[splits.length - 2];
        }
        if (splits.length < 10) {
            // Fewer than 10 segments means there is no parent resource.
            this.parentId = null;
        } else {
            String[] parentSplits = new String[splits.length - 2];
            System.arraycopy(splits, 0, parentSplits, 0, splits.length - 2);
            this.parentId = "/" + String.join("/", parentSplits);
        }
        // Guard every positional lookup: segments that are absent simply leave
        // the corresponding field null instead of throwing.
        if (splits.length > 0 && !splits[0].equalsIgnoreCase("subscriptions")) {
            throw new InvalidParameterException(badIdErrorText(id));
        }
        this.subscriptionId = splits.length > 1 ? splits[1] : null;
        if (splits.length > 2 && !splits[2].equalsIgnoreCase("resourceGroups")) {
            throw new InvalidParameterException(badIdErrorText(id));
        }
        this.resourceGroupName = splits.length > 3 ? splits[3] : null;
        if (splits.length > 4 && !splits[4].equalsIgnoreCase("providers")) {
            throw new InvalidParameterException(badIdErrorText(id));
        }
        this.providerNamespace = splits.length > 5 ? splits[5] : null;
    }
} | if (!splits[0].equalsIgnoreCase("subscriptions")) { | private ResourceId(final String id) {
if (id == null) {
this.subscriptionId = null;
this.resourceGroupName = null;
this.name = null;
this.providerNamespace = null;
this.resourceType = null;
this.id = null;
this.parentId = null;
return;
} else {
String[] splits = (id.startsWith("/")) ? id.substring(1).split("/") : id.split("/");
if (splits.length % 2 == 1) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.id = id;
if (splits.length < 2) {
throw new InvalidParameterException(badIdErrorText(id));
} else {
this.name = splits[splits.length - 1];
this.resourceType = splits[splits.length - 2];
}
if (splits.length < 10) {
this.parentId = null;
} else {
String[] parentSplits = new String[splits.length - 2];
System.arraycopy(splits, 0, parentSplits, 0, splits.length - 2);
this.parentId = "/" + String.join("/", parentSplits);
}
if (splits.length > 0 && !splits[0].equalsIgnoreCase("subscriptions")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.subscriptionId = splits.length > 1 ? splits[1] : null;
if (splits.length > 2 && !splits[2].equalsIgnoreCase("resourceGroups")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.resourceGroupName = splits.length > 3 ? splits[3] : null;
if (splits.length > 4 && !splits[4].equalsIgnoreCase("providers")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.providerNamespace = splits.length > 5 ? splits[5] : null;
}
} | class ResourceId {
// Parsed components of an ARM resource ID; all are null when the ID was null.
private final String subscriptionId;
private final String resourceGroupName;
private final String name;
private final String providerNamespace;
private final String resourceType;
private final String id;       // the full, original resource ID string
private final String parentId; // ID of the parent resource, or null when there is none
/** Builds the error message used when an ID fails to parse. */
private static String badIdErrorText(String id) {
    return String.format("The specified ID `%s` is not a valid Azure resource ID.", id);
}
/**
 * Returns parsed ResourceId object for a given resource id.
 *
 * @param id of the resource
 * @return ResourceId object
 */
public static ResourceId fromString(String id) {
    return new ResourceId(id);
}
/**
 * @return subscriptionId of the resource.
 */
public String subscriptionId() {
    return this.subscriptionId;
}
/**
 * @return resourceGroupName of the resource.
 */
public String resourceGroupName() {
    return this.resourceGroupName;
}
/**
 * @return name of the resource.
 */
public String name() {
    return this.name;
}
/**
 * @return parent resource id of the resource if any, otherwise null.
 */
public ResourceId parent() {
    // The parent ID string is captured at construction; parse it lazily here.
    if (this.id == null || this.parentId == null) {
        return null;
    } else {
        return fromString(this.parentId);
    }
}
/**
 * @return name of the provider.
 */
public String providerNamespace() {
    return this.providerNamespace;
}
/**
 * @return type of the resource.
 */
public String resourceType() {
    return this.resourceType;
}
/**
 * @return full type of the resource.
 */
public String fullResourceType() {
    // Recursively prefixes ancestor types, yielding e.g. "Ns/type/childType".
    if (this.parentId == null) {
        return this.providerNamespace + "/" + this.resourceType;
    } else {
        return this.parent().fullResourceType() + "/" + this.resourceType;
    }
}
/**
 * @return the id of the resource.
 */
public String id() {
    return id;
}
} | class ResourceId {
private final String subscriptionId;
private final String resourceGroupName;
private final String name;
private final String providerNamespace;
private final String resourceType;
private final String id;
private final String parentId;
private static String badIdErrorText(String id) {
return String.format("The specified ID `%s` is not a valid Azure resource ID.", id);
}
/**
* Returns parsed ResourceId object for a given resource id.
*
* @param id of the resource
* @return ResourceId object
*/
public static ResourceId fromString(String id) {
return new ResourceId(id);
}
/**
* @return subscriptionId of the resource.
*/
public String subscriptionId() {
return this.subscriptionId;
}
/**
* @return resourceGroupName of the resource.
*/
public String resourceGroupName() {
return this.resourceGroupName;
}
/**
* @return name of the resource.
*/
public String name() {
return this.name;
}
/**
* @return parent resource id of the resource if any, otherwise null.
*/
public ResourceId parent() {
if (this.id == null || this.parentId == null) {
return null;
} else {
return fromString(this.parentId);
}
}
/**
* @return name of the provider.
*/
public String providerNamespace() {
return this.providerNamespace;
}
/**
* @return type of the resource.
*/
public String resourceType() {
return this.resourceType;
}
/**
* @return full type of the resource.
*/
public String fullResourceType() {
if (this.parentId == null) {
return this.providerNamespace + "/" + this.resourceType;
} else {
return this.parent().fullResourceType() + "/" + this.resourceType;
}
}
/**
* @return the id of the resource.
*/
public String id() {
return id;
}
} |
I'll fix it. I just take them out of `switch..case`. | private ResourceId(final String id) {
if (id == null) {
this.subscriptionId = null;
this.resourceGroupName = null;
this.name = null;
this.providerNamespace = null;
this.resourceType = null;
this.id = null;
this.parentId = null;
return;
} else {
String[] splits = (id.startsWith("/")) ? id.substring(1).split("/") : id.split("/");
if (splits.length % 2 == 1) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.id = id;
if (splits.length < 2) {
throw new InvalidParameterException(badIdErrorText(id));
} else {
this.name = splits[splits.length - 1];
this.resourceType = splits[splits.length - 2];
}
if (splits.length < 10) {
this.parentId = null;
} else {
String[] parentSplits = new String[splits.length - 2];
System.arraycopy(splits, 0, parentSplits, 0, splits.length - 2);
this.parentId = "/" + String.join("/", parentSplits);
}
if (!splits[0].equalsIgnoreCase("subscriptions")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.subscriptionId = splits[1];
if (!splits[2].equalsIgnoreCase("resourceGroups")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.resourceGroupName = splits[3];
if (!splits[4].equalsIgnoreCase("providers")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.providerNamespace = splits[5];
}
} | if (!splits[0].equalsIgnoreCase("subscriptions")) { | private ResourceId(final String id) {
if (id == null) {
this.subscriptionId = null;
this.resourceGroupName = null;
this.name = null;
this.providerNamespace = null;
this.resourceType = null;
this.id = null;
this.parentId = null;
return;
} else {
String[] splits = (id.startsWith("/")) ? id.substring(1).split("/") : id.split("/");
if (splits.length % 2 == 1) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.id = id;
if (splits.length < 2) {
throw new InvalidParameterException(badIdErrorText(id));
} else {
this.name = splits[splits.length - 1];
this.resourceType = splits[splits.length - 2];
}
if (splits.length < 10) {
this.parentId = null;
} else {
String[] parentSplits = new String[splits.length - 2];
System.arraycopy(splits, 0, parentSplits, 0, splits.length - 2);
this.parentId = "/" + String.join("/", parentSplits);
}
if (splits.length > 0 && !splits[0].equalsIgnoreCase("subscriptions")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.subscriptionId = splits.length > 1 ? splits[1] : null;
if (splits.length > 2 && !splits[2].equalsIgnoreCase("resourceGroups")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.resourceGroupName = splits.length > 3 ? splits[3] : null;
if (splits.length > 4 && !splits[4].equalsIgnoreCase("providers")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.providerNamespace = splits.length > 5 ? splits[5] : null;
}
} | class ResourceId {
private final String subscriptionId;
private final String resourceGroupName;
private final String name;
private final String providerNamespace;
private final String resourceType;
private final String id;
private final String parentId;
private static String badIdErrorText(String id) {
return String.format("The specified ID `%s` is not a valid Azure resource ID.", id);
}
/**
* Returns parsed ResourceId object for a given resource id.
*
* @param id of the resource
* @return ResourceId object
*/
public static ResourceId fromString(String id) {
return new ResourceId(id);
}
/**
* @return subscriptionId of the resource.
*/
public String subscriptionId() {
return this.subscriptionId;
}
/**
* @return resourceGroupName of the resource.
*/
public String resourceGroupName() {
return this.resourceGroupName;
}
/**
* @return name of the resource.
*/
public String name() {
return this.name;
}
/**
* @return parent resource id of the resource if any, otherwise null.
*/
public ResourceId parent() {
if (this.id == null || this.parentId == null) {
return null;
} else {
return fromString(this.parentId);
}
}
/**
* @return name of the provider.
*/
public String providerNamespace() {
return this.providerNamespace;
}
/**
* @return type of the resource.
*/
public String resourceType() {
return this.resourceType;
}
/**
* @return full type of the resource.
*/
public String fullResourceType() {
if (this.parentId == null) {
return this.providerNamespace + "/" + this.resourceType;
} else {
return this.parent().fullResourceType() + "/" + this.resourceType;
}
}
/**
* @return the id of the resource.
*/
public String id() {
return id;
}
} | class ResourceId {
private final String subscriptionId;
private final String resourceGroupName;
private final String name;
private final String providerNamespace;
private final String resourceType;
private final String id;
private final String parentId;
private static String badIdErrorText(String id) {
return String.format("The specified ID `%s` is not a valid Azure resource ID.", id);
}
/**
* Returns parsed ResourceId object for a given resource id.
*
* @param id of the resource
* @return ResourceId object
*/
public static ResourceId fromString(String id) {
return new ResourceId(id);
}
/**
* @return subscriptionId of the resource.
*/
public String subscriptionId() {
return this.subscriptionId;
}
/**
* @return resourceGroupName of the resource.
*/
public String resourceGroupName() {
return this.resourceGroupName;
}
/**
* @return name of the resource.
*/
public String name() {
return this.name;
}
/**
* @return parent resource id of the resource if any, otherwise null.
*/
public ResourceId parent() {
if (this.id == null || this.parentId == null) {
return null;
} else {
return fromString(this.parentId);
}
}
/**
* @return name of the provider.
*/
public String providerNamespace() {
return this.providerNamespace;
}
/**
* @return type of the resource.
*/
public String resourceType() {
return this.resourceType;
}
/**
* @return full type of the resource.
*/
public String fullResourceType() {
if (this.parentId == null) {
return this.providerNamespace + "/" + this.resourceType;
} else {
return this.parent().fullResourceType() + "/" + this.resourceType;
}
}
/**
* @return the id of the resource.
*/
public String id() {
return id;
}
} |
done | private ResourceId(final String id) {
if (id == null) {
this.subscriptionId = null;
this.resourceGroupName = null;
this.name = null;
this.providerNamespace = null;
this.resourceType = null;
this.id = null;
this.parentId = null;
return;
} else {
String[] splits = (id.startsWith("/")) ? id.substring(1).split("/") : id.split("/");
if (splits.length % 2 == 1) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.id = id;
if (splits.length < 2) {
throw new InvalidParameterException(badIdErrorText(id));
} else {
this.name = splits[splits.length - 1];
this.resourceType = splits[splits.length - 2];
}
if (splits.length < 10) {
this.parentId = null;
} else {
String[] parentSplits = new String[splits.length - 2];
System.arraycopy(splits, 0, parentSplits, 0, splits.length - 2);
this.parentId = "/" + String.join("/", parentSplits);
}
if (!splits[0].equalsIgnoreCase("subscriptions")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.subscriptionId = splits[1];
if (!splits[2].equalsIgnoreCase("resourceGroups")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.resourceGroupName = splits[3];
if (!splits[4].equalsIgnoreCase("providers")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.providerNamespace = splits[5];
}
} | if (!splits[0].equalsIgnoreCase("subscriptions")) { | private ResourceId(final String id) {
if (id == null) {
this.subscriptionId = null;
this.resourceGroupName = null;
this.name = null;
this.providerNamespace = null;
this.resourceType = null;
this.id = null;
this.parentId = null;
return;
} else {
String[] splits = (id.startsWith("/")) ? id.substring(1).split("/") : id.split("/");
if (splits.length % 2 == 1) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.id = id;
if (splits.length < 2) {
throw new InvalidParameterException(badIdErrorText(id));
} else {
this.name = splits[splits.length - 1];
this.resourceType = splits[splits.length - 2];
}
if (splits.length < 10) {
this.parentId = null;
} else {
String[] parentSplits = new String[splits.length - 2];
System.arraycopy(splits, 0, parentSplits, 0, splits.length - 2);
this.parentId = "/" + String.join("/", parentSplits);
}
if (splits.length > 0 && !splits[0].equalsIgnoreCase("subscriptions")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.subscriptionId = splits.length > 1 ? splits[1] : null;
if (splits.length > 2 && !splits[2].equalsIgnoreCase("resourceGroups")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.resourceGroupName = splits.length > 3 ? splits[3] : null;
if (splits.length > 4 && !splits[4].equalsIgnoreCase("providers")) {
throw new InvalidParameterException(badIdErrorText(id));
}
this.providerNamespace = splits.length > 5 ? splits[5] : null;
}
} | class ResourceId {
private final String subscriptionId;
private final String resourceGroupName;
private final String name;
private final String providerNamespace;
private final String resourceType;
private final String id;
private final String parentId;
private static String badIdErrorText(String id) {
return String.format("The specified ID `%s` is not a valid Azure resource ID.", id);
}
/**
* Returns parsed ResourceId object for a given resource id.
*
* @param id of the resource
* @return ResourceId object
*/
public static ResourceId fromString(String id) {
return new ResourceId(id);
}
/**
* @return subscriptionId of the resource.
*/
public String subscriptionId() {
return this.subscriptionId;
}
/**
* @return resourceGroupName of the resource.
*/
public String resourceGroupName() {
return this.resourceGroupName;
}
/**
* @return name of the resource.
*/
public String name() {
return this.name;
}
/**
* @return parent resource id of the resource if any, otherwise null.
*/
public ResourceId parent() {
if (this.id == null || this.parentId == null) {
return null;
} else {
return fromString(this.parentId);
}
}
/**
* @return name of the provider.
*/
public String providerNamespace() {
return this.providerNamespace;
}
/**
* @return type of the resource.
*/
public String resourceType() {
return this.resourceType;
}
/**
* @return full type of the resource.
*/
public String fullResourceType() {
if (this.parentId == null) {
return this.providerNamespace + "/" + this.resourceType;
} else {
return this.parent().fullResourceType() + "/" + this.resourceType;
}
}
/**
* @return the id of the resource.
*/
public String id() {
return id;
}
} | class ResourceId {
private final String subscriptionId;
private final String resourceGroupName;
private final String name;
private final String providerNamespace;
private final String resourceType;
private final String id;
private final String parentId;
private static String badIdErrorText(String id) {
return String.format("The specified ID `%s` is not a valid Azure resource ID.", id);
}
/**
* Returns parsed ResourceId object for a given resource id.
*
* @param id of the resource
* @return ResourceId object
*/
public static ResourceId fromString(String id) {
return new ResourceId(id);
}
/**
* @return subscriptionId of the resource.
*/
public String subscriptionId() {
return this.subscriptionId;
}
/**
* @return resourceGroupName of the resource.
*/
public String resourceGroupName() {
return this.resourceGroupName;
}
/**
* @return name of the resource.
*/
public String name() {
return this.name;
}
/**
* @return parent resource id of the resource if any, otherwise null.
*/
public ResourceId parent() {
if (this.id == null || this.parentId == null) {
return null;
} else {
return fromString(this.parentId);
}
}
/**
* @return name of the provider.
*/
public String providerNamespace() {
return this.providerNamespace;
}
/**
* @return type of the resource.
*/
public String resourceType() {
return this.resourceType;
}
/**
* @return full type of the resource.
*/
public String fullResourceType() {
if (this.parentId == null) {
return this.providerNamespace + "/" + this.resourceType;
} else {
return this.parent().fullResourceType() + "/" + this.resourceType;
}
}
/**
* @return the id of the resource.
*/
public String id() {
return id;
}
} |
I'd rename the variable to azureCloud | public static void main(String[] args) {
LOGGER.info("---------------------");
LOGGER.info("KEY VAULT - SECRETS");
LOGGER.info("IDENTITY - CREDENTIAL");
LOGGER.info("---------------------");
String authorityHostAlias = System.getenv("AZURE_CLOUD");
String authorityHost = AUTHORITY_HOST_MAP.getOrDefault(
authorityHostAlias, KnownAuthorityHosts.AZURE_CLOUD);
/* DefaultAzureCredentialBuilder() is expecting the following environment variables:
* AZURE_CLIENT_ID
* AZURE_CLIENT_SECRET
* AZURE_TENANT_ID
*/
secretClient = new SecretClientBuilder()
.vaultUrl(System.getenv("AZURE_PROJECT_URL"))
.credential(
new DefaultAzureCredentialBuilder()
.authorityHost(authorityHost)
.build()
).buildClient();
try {
setSecret();
getSecret();
} finally {
deleteSecret();
}
} | String authorityHostAlias = System.getenv("AZURE_CLOUD"); | public static void main(String[] args) {
LOGGER.info("---------------------");
LOGGER.info("KEY VAULT - SECRETS");
LOGGER.info("IDENTITY - CREDENTIAL");
LOGGER.info("---------------------");
String azureCloud = System.getenv("AZURE_CLOUD");
String authorityHost = AUTHORITY_HOST_MAP.getOrDefault(
azureCloud, KnownAuthorityHosts.AZURE_CLOUD);
/* DefaultAzureCredentialBuilder() is expecting the following environment variables:
* AZURE_CLIENT_ID
* AZURE_CLIENT_SECRET
* AZURE_TENANT_ID
*/
secretClient = new SecretClientBuilder()
.vaultUrl(System.getenv("AZURE_PROJECT_URL"))
.credential(
new DefaultAzureCredentialBuilder()
.authorityHost(authorityHost)
.build()
).buildClient();
try {
setSecret();
getSecret();
} finally {
deleteSecret();
}
} | class KeyVaultSecrets {
private static SecretClient secretClient;
private static final String SECRET_NAME = "MySecretName-" + UUID.randomUUID();
private static final String SECRET_VALUE = "MySecretValue";
private static final Logger LOGGER = LoggerFactory.getLogger(KeyVaultSecrets.class);
private static HashMap<String, String> AUTHORITY_HOST_MAP = new HashMap<String, String>() {{
put("AzureCloud", KnownAuthorityHosts.AZURE_CLOUD);
put("AzureChinaCloud", KnownAuthorityHosts.AZURE_CHINA_CLOUD);
put("AzureGermanCloud", KnownAuthorityHosts.AZURE_GERMAN_CLOUD);
put("AzureUSGovernment", KnownAuthorityHosts.AZURE_US_GOVERNMENT);
}};
private static void setSecret() {
LOGGER.info("Setting a secret...");
KeyVaultSecret response = secretClient.setSecret(SECRET_NAME, SECRET_VALUE);
LOGGER.info("\tDONE: ({},{}).", response.getName(), response.getValue());
}
private static void getSecret() {
LOGGER.info("Getting the secret... ");
KeyVaultSecret response = secretClient.getSecret(SECRET_NAME);
LOGGER.info("\tDONE: secret ({},{}) retrieved.", response.getName(), response.getValue());
}
private static void deleteSecret() {
LOGGER.info("Deleting the secret... ");
SyncPoller<DeletedSecret, Void> poller = secretClient.beginDeleteSecret(SECRET_NAME);
DeletedSecret response = poller.poll().getValue();
LOGGER.info("\tDONE: deleted.");
}
} | class KeyVaultSecrets {
private static SecretClient secretClient;
private static final String SECRET_NAME = "MySecretName-" + UUID.randomUUID();
private static final String SECRET_VALUE = "MySecretValue";
private static final Logger LOGGER = LoggerFactory.getLogger(KeyVaultSecrets.class);
private static HashMap<String, String> AUTHORITY_HOST_MAP = new HashMap<String, String>() {{
put("AzureCloud", KnownAuthorityHosts.AZURE_CLOUD);
put("AzureChinaCloud", KnownAuthorityHosts.AZURE_CHINA_CLOUD);
put("AzureGermanCloud", KnownAuthorityHosts.AZURE_GERMAN_CLOUD);
put("AzureUSGovernment", KnownAuthorityHosts.AZURE_US_GOVERNMENT);
}};
private static void setSecret() {
LOGGER.info("Setting a secret...");
KeyVaultSecret response = secretClient.setSecret(SECRET_NAME, SECRET_VALUE);
LOGGER.info("\tDONE: ({},{}).", response.getName(), response.getValue());
}
private static void getSecret() {
LOGGER.info("Getting the secret... ");
KeyVaultSecret response = secretClient.getSecret(SECRET_NAME);
LOGGER.info("\tDONE: secret ({},{}) retrieved.", response.getName(), response.getValue());
}
private static void deleteSecret() {
LOGGER.info("Deleting the secret... ");
SyncPoller<DeletedSecret, Void> poller = secretClient.beginDeleteSecret(SECRET_NAME);
DeletedSecret response = poller.poll().getValue();
LOGGER.info("\tDONE: deleted.");
}
} |
partition split normally takes 10-20 min to complete. how are we ensuring that the test sees partition split completing within its life time? | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic()) | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
safeDeleteCollection(createdFeedCollectionForSplit);
try {
Thread.sleep(500);
} catch (Exception e){ }
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
receivedDocuments = new ConcurrentHashMap<>();
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
don't catch `InterruptedException` if that's thrown, we can fail the test. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | log.error(e.getMessage()); | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
/**
 * Builds the change-feed callback used by the tests in this class: logs the
 * processing thread, records every received document via {@link #processItem},
 * and logs completion.
 *
 * @return consumer invoked by the ChangeFeedProcessor with each batch of changes
 */
// NOTE(review): removed a stray @Test annotation that was attached to this
// private factory method — TestNG does not run private non-void methods, so the
// annotation was dead code and misleading.
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until {@code count} documents have been recorded in the static
 * {@code receivedDocuments} map or the timeout elapses, then asserts that all
 * expected documents were actually received.
 *
 * @param timeoutInMillisecond maximum time to wait
 * @param count                number of documents expected
 */
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            // Restore the interrupt flag and stop waiting; the assertion below
            // fails the test if the documents never arrived.
            Thread.currentThread().interrupt();
            break;
        }
    }
    // Bug fix: asserting on remainingWork let the test pass on timeout whenever
    // the budget was a multiple of 100ms (the counter ends at exactly 0).
    // Assert the actual receipt condition instead.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Per-test fixture: provisions a fresh feed container and a fresh lease
// container before every test method in the "emulator" group.
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
// Class-level setup: builds the async client once and binds to the database
// shared by the whole suite.
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
/**
 * Per-test teardown: best-effort deletion of the containers created for the
 * test, followed by a short grace period so background operations settle.
 */
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
    safeDeleteCollection(createdFeedCollection);
    safeDeleteCollection(createdLeaseCollection);
    safeDeleteCollection(createdFeedCollectionForSplit);
    try {
        Thread.sleep(500);
    } catch (InterruptedException e) {
        // Bug fix: the original empty catch swallowed every exception type.
        // Only the sleep's InterruptedException is expected here; restore the
        // interrupt flag instead of silently discarding it.
        Thread.currentThread().interrupt();
    }
}
// Class-level teardown: closes the shared async client once all tests finish.
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
/**
 * Resets the shared received-documents map and seeds {@code count} documents
 * into the feed container, blocking until replicas have caught up.
 *
 * @param feedCollection container to populate
 * @param count          number of documents to insert
 */
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    receivedDocuments = new ConcurrentHashMap<>();
    // Dedup: the insert-and-wait sequence was a verbatim copy of
    // createReadFeedDocuments, so delegate instead of repeating it.
    createReadFeedDocuments(feedCollection, count);
}
/**
 * Inserts {@code count} freshly generated documents into the given feed
 * container and blocks until replicas have caught up; the inserted documents
 * are remembered in {@code createdDocuments} for later assertions.
 */
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, definitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Creates a document definition whose id and partition key ("mypk") share a
 * fresh random UUID, with a fixed "sgmts" payload.
 */
private CosmosItemProperties getDocumentDefinition() {
    String id = UUID.randomUUID().toString();
    String json = String.format(
        "{ \"id\": \"%s\", \"mypk\": \"%s\", \"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]}",
        id, id);
    return new CosmosItemProperties(json);
}
/** Provisions a feed container using the suite's standard collection definition. */
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    return createCollection(
        createdDatabase,
        getCollectionDefinition(),
        new CosmosContainerRequestOptions(),
        provisionedThroughput);
}
/**
 * Provisions a lease container (partitioned on "/id") whose name is derived
 * from the shared database id.
 */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerProperties leaseDefinition =
        new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
    return createCollection(
        createdDatabase, leaseDefinition, new CosmosContainerRequestOptions(), provisionedThroughput);
}
/**
 * Records a change-feed document in the shared received-documents map, keyed
 * by its "id" field. The pretty-printed payload is logged first; a failure to
 * serialize is logged but does not prevent the document from being recorded.
 */
private static synchronized void processItem(JsonNode item) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException jsonError) {
        log.error("Failure in processing json [{}]", jsonError.getMessage(), jsonError);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory constructor: the suite is instantiated once per configured
// client builder supplied by the "clientBuilders" data provider.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true)
 * replays documents that were inserted BEFORE the processor was started.
 * Containers are created per-test and always deleted in the finally block.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
// Seed the feed container before start; startFromBeginning must pick these up.
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
// Give the processor time to drain the pre-inserted documents.
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
// Every seeded document must have been observed through the change feed.
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
/**
 * Verifies that a processor configured with a custom start time (one day in
 * the past) receives documents inserted AFTER it was started. Containers are
 * created per-test and always deleted in the finally block.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
// Documents are inserted AFTER start; the custom start time still covers them.
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
/**
 * Simulates a stale lease: a first processor runs and is stopped, its lease
 * documents are then overwritten with a bogus "TEMP_OWNER", and a second
 * processor must still acquire the leases and receive all new documents.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
// First processor: intentionally does not record documents — it only exists
// to create the lease documents that will later be marked stale.
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
// Second processor: the one under test; it must take over the stale leases.
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond)
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
// Start the first processor, let it run briefly, stop it, then mutate its
// lease documents so they look owned by a dead host.
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
// Overwrite every lease owner with a bogus value to simulate staleness.
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
// Wait for the handover: first processor stopped, second started.
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
/**
 * Builds the change-feed callback used by the tests: logs the processing
 * thread, records each received document into the supplied map via
 * {@link #processItem}, and logs completion.
 *
 * @param receivedDocuments map that collects received documents keyed by id
 * @return consumer invoked by the ChangeFeedProcessor with each batch of changes
 */
// NOTE(review): removed a stray @Test annotation that was attached to this
// private factory method — TestNG does not run private non-void methods, so the
// annotation was dead code and misleading.
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item, receivedDocuments);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until {@code count} documents are present in the given map or the
 * timeout elapses, then asserts that all expected documents were received.
 *
 * @param receivedDocuments    map populated by the change-feed handler
 * @param timeoutInMillisecond maximum time to wait
 * @param count                number of documents expected
 * @throws InterruptedException if the polling sleep is interrupted
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    // Bug fix: asserting on remainingWork let the test pass on timeout whenever
    // the budget was a multiple of 100ms (the counter ends at exactly 0).
    // Assert the actual receipt condition instead.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Intentionally empty: each test in this class provisions and deletes its own
// containers, so there is no shared per-test state to set up here.
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
// Class-level setup: builds the async client once and binds to the database
// shared by the whole suite.
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
// Intentionally empty: per-test containers are deleted in each test's finally
// block, so no shared cleanup is needed here.
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
// Class-level teardown: closes the shared async client once all tests finish.
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
/**
 * Seeds {@code count} documents into the feed container, appending them to
 * {@code createdDocuments}, and blocks until replicas have caught up.
 *
 * @param createdDocuments  accumulator for the inserted documents
 * @param receivedDocuments unused here; kept for signature compatibility with
 *                          existing callers
 * @param feedCollection    container to populate
 * @param count             number of documents to insert
 */
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    // Dedup: the body was a verbatim copy of createReadFeedDocuments; delegate.
    createReadFeedDocuments(createdDocuments, feedCollection, count);
}
/**
 * Inserts {@code count} freshly generated documents into the given feed
 * container, appends them to {@code createdDocuments}, and blocks until
 * replicas have caught up.
 */
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Creates a document definition whose id and partition key ("mypk") share a
 * fresh random UUID, with a fixed "sgmts" payload.
 */
private CosmosItemProperties getDocumentDefinition() {
    String id = UUID.randomUUID().toString();
    String json = String.format(
        "{ \"id\": \"%s\", \"mypk\": \"%s\", \"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]}",
        id, id);
    return new CosmosItemProperties(json);
}
/** Provisions a feed container using the suite's standard collection definition. */
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    return createCollection(
        createdDatabase,
        getCollectionDefinition(),
        new CosmosContainerRequestOptions(),
        provisionedThroughput);
}
/**
 * Provisions a uniquely named lease container partitioned on "/id"; the random
 * suffix keeps concurrently running tests from colliding.
 */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    String leaseCollectionName = "leases_" + UUID.randomUUID();
    CosmosContainerProperties leaseDefinition = new CosmosContainerProperties(leaseCollectionName, "/id");
    return createCollection(
        createdDatabase, leaseDefinition, new CosmosContainerRequestOptions(), provisionedThroughput);
}
/**
 * Records a change-feed document in the supplied map, keyed by its "id" field.
 * The pretty-printed payload is logged first; a failure to serialize is logged
 * but does not prevent the document from being recorded.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException jsonError) {
        log.error("Failure in processing json [{}]", jsonError.getMessage(), jsonError);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} |
Don't catch `InterruptedException`; if it's thrown, we can let the test fail. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | } catch (InterruptedException e) { | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory constructor: the suite is instantiated once per configured
// client builder supplied by the "clientBuilders" data provider.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true)
 * replays documents inserted before it was started (containers come from the
 * per-test beforeMethod fixture).
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
// NOTE(review): the exception is logged but not rethrown, so the test keeps
// running after a failed start and will fail later with a less useful message.
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
// NOTE(review): swallowing InterruptedException (and losing the interrupt
// flag) hides interruption; prefer failing the test or re-interrupting.
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
/**
 * Verifies that a processor configured with a custom start time (one day in
 * the past) receives documents inserted after it was started.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
// NOTE(review): the exception is logged but not rethrown, so the test keeps
// running after a failed start and will fail later with a less useful message.
log.error("Change feed processor did not start in the expected time", ex);
}
// Documents are inserted AFTER start; the custom start time still covers them.
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
// NOTE(review): swallowed InterruptedException — prefer failing the test.
log.error(e.getMessage());
}
receivedDocuments.clear();
}
/**
 * Simulates a stale lease: a first processor runs and is stopped, its lease
 * documents are then overwritten with a bogus "TEMP_OWNER", and a second
 * processor must still acquire the leases and receive all new documents.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
// First processor: intentionally does not record documents — it only exists
// to create the lease documents that will later be marked stale.
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
// Second processor: the one under test; it must take over the stale leases.
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
// Start the first processor, let it run briefly, stop it, then mutate its
// lease documents so they look owned by a dead host.
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
// NOTE(review): swallowed InterruptedException — prefer failing the test.
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
// Overwrite every lease owner with a bogus value to simulate staleness.
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
// NOTE(review): logged but not rethrown — a failed start surfaces only later.
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
// NOTE(review): a stray @Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
// annotation was attached to this private helper. TestNG does not run private,
// value-returning methods as tests, so the annotation was dead and misleading; removed.
/**
 * Builds the change feed handler shared by the tests: logs the processing thread
 * and records every delivered document via {@code processItem}.
 *
 * @return a consumer that stores each received document keyed by its id
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Blocks until {@code count} documents have shown up in the shared
 * {@code receivedDocuments} map or the timeout elapses, polling every 100 ms.
 *
 * @param timeoutInMillisecond maximum time to wait
 * @param count                number of documents expected
 */
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            // Restore the interrupt flag and stop waiting instead of swallowing
            // the interruption and spinning through the rest of the timeout.
            Thread.currentThread().interrupt();
            break;
        }
    }
    // Assert the condition we actually waited for: the previous check
    // (remainingWork >= 0) passed spuriously when the loop timed out with
    // remainingWork landing exactly on 0.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Per-test setup: provision a fresh feed container and lease container so each
// test observes only its own change feed.
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
    createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
// One-time setup: build the async client and resolve the shared database in
// which all per-test containers are created.
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
/**
 * Per-test teardown: best-effort deletion of the containers created for the
 * test, followed by a short pause to let the service settle.
 */
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
    safeDeleteCollection(createdFeedCollection);
    safeDeleteCollection(createdLeaseCollection);
    safeDeleteCollection(createdFeedCollectionForSplit);
    try {
        Thread.sleep(500);
    } catch (InterruptedException e) {
        // The original empty catch of Exception discarded the interruption
        // entirely; only InterruptedException can occur here, and the
        // interrupt status must be restored for the caller.
        Thread.currentThread().interrupt();
    }
}
// One-time teardown: release the async client created in before_ChangeFeedProcessorTest().
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Resets the shared received-documents sink, bulk-inserts {@code count} fresh
 * documents into {@code feedCollection}, and waits for replicas to catch up.
 */
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    // Fresh sink so the change feed handler only sees this test's documents.
    receivedDocuments = new ConcurrentHashMap<>();

    List<CosmosItemProperties> definitions = new ArrayList<>();
    for (int created = 0; created < count; created++) {
        definitions.add(getDocumentDefinition());
    }

    createdDocuments = bulkInsertBlocking(feedCollection, definitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Bulk-inserts {@code count} fresh documents into {@code feedCollection} and
 * waits for replicas to catch up.
 */
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    for (int created = 0; created < count; created++) {
        definitions.add(getDocumentDefinition());
    }
    // NOTE(review): this overwrites (does not append to) the createdDocuments
    // field, so documents tracked by an earlier setup call are dropped — confirm intended.
    createdDocuments = bulkInsertBlocking(feedCollection, definitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Builds one test document. A single random UUID doubles as both the id and
 * the "mypk" partition key value.
 */
private CosmosItemProperties getDocumentDefinition() {
    String documentId = UUID.randomUUID().toString();
    String json = String.format("{ "
            + "\"id\": \"%s\", "
            + "\"mypk\": \"%s\", "
            + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
            + "}"
        , documentId, documentId);
    return new CosmosItemProperties(json);
}
// Creates the feed container (default collection definition) with the requested RU/s.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
/**
 * Creates the lease container, partitioned by "/id", with the requested RU/s.
 */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    // Use a random suffix rather than the database id: the database is shared,
    // so a name derived only from it collides when tests run concurrently
    // (matches the newer variant of this helper elsewhere in this suite).
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
        "leases_" + UUID.randomUUID(),
        "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Records one change-feed document in the shared map keyed by its "id" field,
 * logging the pretty-printed payload first. Synchronized so concurrent handler
 * threads interleave cleanly.
 */
private static synchronized void processItem(JsonNode item) {
    try {
        String prettyJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", prettyJson);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory: instantiates the suite once per client configuration
// supplied by the "clientBuilders" data provider.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true) replays
 * documents that were inserted before it was started.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Documents are inserted BEFORE the processor starts; they must still be delivered.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Give the processor time to acquire leases and drain the feed.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        // Every pre-inserted document must have been observed by the handler.
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Verifies that a processor configured with setStartTime(one day ago) picks up
 * documents that are inserted after it has started.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(1))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartTime(OffsetDateTime.now().minusDays(1))
                .setMinScaleCount(1)
                .setMaxScaleCount(3)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Insert AFTER start; the processor should deliver them within the wait window.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Simulates a stale lease owner: the first processor acquires the leases and is
 * stopped; the lease documents are then rewritten to a bogus owner; a second
 * processor must detect the stale leases, take them over, and process the feed.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // First processor: no-op handler, only used to create/acquire the leases.
        ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerFirst)
            .handleChanges(docs -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst)
;
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix(leasePrefix)
            )
            .build();
        // Second processor: must steal the stale leases and actually record documents.
        ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerSecond)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(10))
                .setLeaseAcquireInterval(Duration.ofSeconds(5))
                .setLeaseExpirationInterval(Duration.ofSeconds(20))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix(leasePrefix)
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .build();
        try {
            // Start the first processor, let it run, stop it, then overwrite the
            // owner on every lease document before kicking off the second processor.
            changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessorFirst)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessorFirst.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .doOnSuccess(aVoid -> {
                    try {
                        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("Interrupted exception", e);
                    }
                    ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                    SqlParameter param = new SqlParameter();
                    param.setName("@PartitionLeasePrefix");
                    param.setValue(leasePrefix);
                    SqlQuerySpec querySpec = new SqlQuerySpec(
                        "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                    FeedOptions feedOptions = new FeedOptions();
                    // Rewrite every lease to a bogus owner so both processors see them as stale.
                    createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                        .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                        .flatMap(doc -> {
                            ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
                            leaseDocument.setOwner("TEMP_OWNER");
                            CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                            return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                                .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                        })
                        .map(ServiceItemLease::fromDocument)
                        .map(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                            return leaseDocument;
                        })
                        .last()
                        .flatMap(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("Start creating documents");
                            List<CosmosItemProperties> docDefList = new ArrayList<>();
                            for (int i = 0; i < FEED_COUNT; i++) {
                                docDefList.add(getDocumentDefinition());
                            }
                            return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                                .last()
                                .delayElement(Duration.ofMillis(1000))
                                .flatMap(cosmosItemResponse -> {
                                    ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                    return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                                });
                        })
                        .subscribe();
                })
                .subscribe();
        } catch (Exception ex) {
            log.error("First change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Wait for the hand-over: first processor stopped, second started.
        long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
        while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
            remainingWork -= 100;
            Thread.sleep(100);
        }
        waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// NOTE(review): a stray @Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
// annotation was attached to this private helper. TestNG does not run private,
// value-returning methods as tests, so the annotation was dead and misleading; removed.
/**
 * Builds the change feed handler used by the tests: logs the processing thread
 * and records every delivered document into the supplied map via {@code processItem}.
 *
 * @param receivedDocuments sink keyed by document id
 * @return a consumer suitable for ChangeFeedProcessor.handleChanges
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item, receivedDocuments);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Blocks until {@code count} documents appear in {@code receivedDocuments} or
 * the timeout elapses, polling every 100 ms.
 *
 * @throws InterruptedException if the waiting thread is interrupted
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    // Assert the condition we actually waited for: the previous check
    // (remainingWork >= 0) passed spuriously when the loop timed out with
    // remainingWork landing exactly on 0.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Intentionally empty: in this variant each test provisions its own feed/lease
// containers and cleans them up in its finally block.
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
// One-time setup: build the async client and resolve the shared database in
// which the per-test containers are created.
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
// Intentionally empty: container cleanup happens in each test's finally block.
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
// One-time teardown: release the async client created in before_ChangeFeedProcessorTest().
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Bulk-inserts {@code count} fresh documents into {@code feedCollection},
 * appends them to {@code createdDocuments}, and waits for replicas to catch up.
 */
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    for (int created = 0; created < count; created++) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    // NOTE(review): the receivedDocuments parameter is never read here — kept
    // only for signature parity with existing callers.
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Bulk-inserts {@code count} fresh documents into {@code feedCollection},
 * appends them to {@code createdDocuments}, and waits for replicas to catch up.
 */
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    for (int created = 0; created < count; created++) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Builds one test document. A single random UUID doubles as both the id and
 * the "mypk" partition key value.
 */
private CosmosItemProperties getDocumentDefinition() {
    String documentId = UUID.randomUUID().toString();
    String json = String.format("{ "
            + "\"id\": \"%s\", "
            + "\"mypk\": \"%s\", "
            + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
            + "}"
        , documentId, documentId);
    return new CosmosItemProperties(json);
}
// Creates the feed container (default collection definition) with the requested RU/s.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
// Creates the lease container, partitioned by "/id". The random suffix keeps
// concurrently running tests from colliding on the name in the shared database.
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
        "leases_" + UUID.randomUUID(),
        "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Records one change-feed document in {@code receivedDocuments} keyed by its
 * "id" field, logging the pretty-printed payload first. Synchronized so
 * concurrent handler threads interleave cleanly.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    try {
        String prettyJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", prettyJson);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} |
Here and elsewhere: we don't need to catch this exception — let it be thrown. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | } catch (InterruptedException e) { | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory: instantiates the suite once per client configuration
// supplied by the "clientBuilders" data provider.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
// NOTE(review): removed a dangling @Test annotation that was attached to this
// private, value-returning helper — TestNG cannot execute such a method, so the
// annotation was dead weight left over from a removed test.
/**
 * Builds the change-feed handler shared by the tests: logs entry/exit of the
 * processing thread and records every delivered document via {@link #processItem}.
 *
 * @return the consumer invoked by the ChangeFeedProcessor with each batch of changes
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until {@code count} documents have been recorded in the static
 * {@code receivedDocuments} map or {@code timeoutInMillisecond} elapses, then
 * asserts that all expected documents actually arrived.
 *
 * @param timeoutInMillisecond maximum time to wait, in milliseconds
 * @param count                number of feed documents expected
 */
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            log.error(e.getMessage());
            // Restore the interrupt status and stop waiting instead of silently
            // swallowing the interruption; the assertion below reports the outcome.
            Thread.currentThread().interrupt();
            break;
        }
    }
    // Assert on the actual success condition. The previous check
    // (remainingWork >= 0) passed spuriously when the countdown landed exactly
    // on zero even though documents were still missing.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
/**
 * Provisions a fresh feed container and lease container before every test method.
 */
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
    this.createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    this.createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
/**
 * Creates the shared async client and resolves the shared database once per class.
 */
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    CosmosAsyncClient sharedClient = getClientBuilder().buildAsyncClient();
    this.client = sharedClient;
    this.createdDatabase = getSharedCosmosDatabase(sharedClient);
}
/**
 * Deletes any containers a test may have created; safeDeleteCollection is a
 * best-effort cleanup (it tolerates nulls / already-deleted containers —
 * TODO confirm against TestSuiteBase).
 */
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
    safeDeleteCollection(createdFeedCollection);
    safeDeleteCollection(createdLeaseCollection);
    safeDeleteCollection(createdFeedCollectionForSplit);
    try {
        // Brief pause before the next test starts.
        Thread.sleep(500);
    } catch (InterruptedException e) {
        // Previously this was an empty catch (Exception) that swallowed the
        // interruption; restore the interrupt status so callers can react.
        Thread.currentThread().interrupt();
    }
}
/**
 * Releases the shared async client after all tests in this class have run.
 */
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(this.client);
}
/**
 * Resets the received-documents map and seeds {@code feedCollection} with
 * {@code count} freshly generated documents, blocking until the inserts finish
 * and replicas have caught up.
 */
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    receivedDocuments = new ConcurrentHashMap<>();
    List<CosmosItemProperties> definitions = new ArrayList<>();
    for (long created = 0; created < count; created++) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, definitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Inserts {@code count} additional generated documents into {@code feedCollection}
 * without resetting the received-documents map, then waits for replicas to catch up.
 */
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    for (long created = 0; created < count; created++) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, definitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Builds a new document definition whose random UUID serves as both the id and
 * the partition-key value ("mypk").
 */
private CosmosItemProperties getDocumentDefinition() {
    String uuid = UUID.randomUUID().toString();
    String json = String.format(
        "{ "
            + "\"id\": \"%s\", "
            + "\"mypk\": \"%s\", "
            + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
            + "}",
        uuid, uuid);
    return new CosmosItemProperties(json);
}
/**
 * Creates the feed container using the default collection definition at the
 * requested throughput.
 */
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    return createCollection(
        createdDatabase,
        getCollectionDefinition(),
        new CosmosContainerRequestOptions(),
        provisionedThroughput);
}
/**
 * Creates the lease container ("leases_" + database id, partitioned on /id)
 * at the requested throughput.
 */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerProperties leaseDefinition =
        new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
    return createCollection(createdDatabase, leaseDefinition, new CosmosContainerRequestOptions(), provisionedThroughput);
}
/**
 * Logs the received change-feed item (pretty-printed) and records it by id in
 * the static {@code receivedDocuments} map. Synchronized because handler
 * callbacks may arrive from multiple threads.
 */
private static synchronized void processItem(JsonNode item) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
/**
 * TestNG factory constructor: one test instance is created per configured
 * client builder supplied by the "clientBuilders" data provider.
 *
 * @param clientBuilder the Cosmos client configuration under test
 */
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true) replays
 * documents that were inserted BEFORE the processor was started. Containers are
 * created per test and cleaned up in the finally block.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Insert the documents first — the processor must pick them up from the beginning.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Give the processor time to acquire leases and drain the feed.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        // Every pre-inserted document must have been delivered to the handler.
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        // Allow the async stop() above to complete before cleanup.
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Verifies that a processor configured with a custom start time (one day in the
 * past) delivers documents inserted after start-up; scaling bounds 1..3 are
 * exercised as well. Containers are created per test and cleaned up in finally.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId())
;
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(1))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                // Start reading from one day in the past so fresh inserts are in range.
                .setStartTime(OffsetDateTime.now().minusDays(1))
                .setMinScaleCount(1)
                .setMaxScaleCount(3)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Documents are inserted AFTER start-up here, unlike the start-from-beginning test.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        // Allow the async stop() above to complete before cleanup.
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Simulates a stale-lease scenario: a first processor runs and stops, its lease
 * documents are then overwritten with a bogus owner ("TEMP_OWNER"), and a second
 * processor is started to verify it steals the stale leases and processes new
 * documents. Timing is controlled by CHANGE_FEED_PROCESSOR_TIMEOUT multiples.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // First processor: only logs — it exists to create the initial lease documents.
        ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerFirst)
            .handleChanges(docs -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix(leasePrefix)
            )
            .build();
        // Second processor: records received documents; configured with short lease
        // intervals so it can take over the stale leases quickly.
        ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerSecond)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(10))
                .setLeaseAcquireInterval(Duration.ofSeconds(5))
                .setLeaseExpirationInterval(Duration.ofSeconds(20))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix(leasePrefix)
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .build();
        try {
            // Start the first processor, let it run briefly, then stop it so its
            // leases become stale.
            changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessorFirst)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessorFirst.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .doOnSuccess(aVoid -> {
                    try {
                        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("Interrupted exception", e);
                    }
                    ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                    // Find every lease document by its prefix and rewrite its owner
                    // to a bogus value, making the leases look stale.
                    SqlParameter param = new SqlParameter();
                    param.setName("@PartitionLeasePrefix");
                    param.setValue(leasePrefix);
                    SqlQuerySpec querySpec = new SqlQuerySpec(
                        "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                    FeedOptions feedOptions = new FeedOptions();
                    createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                        .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                        .flatMap(doc -> {
                            ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
                            leaseDocument.setOwner("TEMP_OWNER");
                            CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                            return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                                .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                        })
                        .map(ServiceItemLease::fromDocument)
                        .map(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                            return leaseDocument;
                        })
                        .last()
                        .flatMap(leaseDocument -> {
                            // Insert fresh documents, then start the second processor,
                            // which must acquire the stale leases and deliver them.
                            ChangeFeedProcessorTest.log.info("Start creating documents");
                            List<CosmosItemProperties> docDefList = new ArrayList<>();
                            for (int i = 0; i < FEED_COUNT; i++) {
                                docDefList.add(getDocumentDefinition());
                            }
                            return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                                .last()
                                .delayElement(Duration.ofMillis(1000))
                                .flatMap(cosmosItemResponse -> {
                                    ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                    return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                                });
                        })
                        .subscribe();
                })
                .subscribe();
        } catch (Exception ex) {
            log.error("First change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Poll until the hand-over happened (first stopped, second started) or budget runs out.
        long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
        while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
            remainingWork -= 100;
            Thread.sleep(100);
        }
        waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        // Allow the async stop() above to complete before cleanup.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// NOTE(review): removed a dangling @Test annotation that was attached to this
// private, value-returning helper — TestNG cannot execute such a method, so the
// annotation was dead weight left over from a removed test.
/**
 * Builds the change-feed handler used by the tests: logs entry/exit of the
 * processing thread and records each delivered document in {@code receivedDocuments}.
 *
 * @param receivedDocuments per-test map collecting the documents the handler receives
 * @return the consumer invoked by the ChangeFeedProcessor with each batch of changes
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item, receivedDocuments);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until {@code count} documents are present in {@code receivedDocuments}
 * or {@code timeoutInMillisecond} elapses, then asserts all expected documents
 * actually arrived.
 *
 * @param receivedDocuments    map populated by the change-feed handler
 * @param timeoutInMillisecond maximum time to wait, in milliseconds
 * @param count                number of feed documents expected
 * @throws InterruptedException if the waiting thread is interrupted
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    // Assert on the actual success condition. The previous check
    // (remainingWork >= 0) passed spuriously when the countdown landed exactly
    // on zero even though documents were still missing.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
    // Intentionally empty: each test provisions and deletes its own containers.
}
/**
 * Creates the shared async client and resolves the shared database once per class.
 */
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    CosmosAsyncClient sharedClient = getClientBuilder().buildAsyncClient();
    this.client = sharedClient;
    this.createdDatabase = getSharedCosmosDatabase(sharedClient);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
    // Intentionally empty: per-test containers are cleaned up in each test's finally block.
}
/**
 * Releases the shared async client after all tests in this class have run.
 */
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(this.client);
}
/**
 * Seeds {@code feedCollection} with {@code count} generated documents, appending
 * them to {@code createdDocuments}, and blocks until inserts finish and replicas
 * catch up. The {@code receivedDocuments} parameter is kept for signature parity
 * with callers; it is not modified here.
 */
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    for (long created = 0; created < count; created++) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Inserts {@code count} additional generated documents into {@code feedCollection},
 * appending them to {@code createdDocuments}, then waits for replicas to catch up.
 */
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    for (long created = 0; created < count; created++) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Builds a new document definition whose random UUID serves as both the id and
 * the partition-key value ("mypk").
 */
private CosmosItemProperties getDocumentDefinition() {
    String uuid = UUID.randomUUID().toString();
    String json = String.format(
        "{ "
            + "\"id\": \"%s\", "
            + "\"mypk\": \"%s\", "
            + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
            + "}",
        uuid, uuid);
    return new CosmosItemProperties(json);
}
/**
 * Creates the feed container using the default collection definition at the
 * requested throughput.
 */
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    return createCollection(
        createdDatabase,
        getCollectionDefinition(),
        new CosmosContainerRequestOptions(),
        provisionedThroughput);
}
/**
 * Creates a uniquely named lease container ("leases_" + random UUID, partitioned
 * on /id) at the requested throughput, so parallel tests do not collide.
 */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    String leaseContainerId = "leases_" + UUID.randomUUID();
    CosmosContainerProperties leaseDefinition = new CosmosContainerProperties(leaseContainerId, "/id");
    return createCollection(createdDatabase, leaseDefinition, new CosmosContainerRequestOptions(), provisionedThroughput);
}
/**
 * Logs the received change-feed item (pretty-printed) and records it by id in
 * {@code receivedDocuments}. Synchronized because handler callbacks may arrive
 * from multiple threads.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} |
There's a check later to ensure that we see at least 2 partitions. In the regular case it takes a couple of minutes (not 10 minutes) to detect the split. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic()) | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
/**
 * TestNG factory constructor: one test instance is created per configured
 * client builder supplied by the "clientBuilders" data provider.
 *
 * @param clientBuilder the Cosmos client configuration under test
 */
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true) replays
 * documents inserted BEFORE the processor was started. This variant relies on
 * the class-level containers created in beforeMethod and the static
 * receivedDocuments map.
 *
 * NOTE(review): unlike the newer variant, start() failures are logged but not
 * rethrown, and there is no try/finally cleanup — the @AfterMethod hook deletes
 * the containers instead.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
    // Insert the documents first — the processor must pick them up from the beginning.
    setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
    changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(hostName)
        .handleChanges(changeFeedProcessorHandler())
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(20))
            .setLeaseAcquireInterval(Duration.ofSeconds(10))
            .setLeaseExpirationInterval(Duration.ofSeconds(30))
            .setFeedPollDelay(Duration.ofSeconds(2))
            .setLeasePrefix("TEST")
            .setMaxItemCount(10)
            .setStartFromBeginning(true)
            .setMaxScaleCount(0)
            .setExistingLeasesDiscarded(true)
        )
        .build();
    try {
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .subscribe();
    } catch (Exception ex) {
        log.error("Change feed processor did not start in the expected time", ex);
    }
    try {
        // Give the processor time to acquire leases and drain the feed.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
    changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    // Every pre-inserted document must have been delivered to the handler.
    for (CosmosItemProperties item : createdDocuments) {
        assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
    }
    try {
        // Allow the async stop() above to complete.
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
/**
 * Verifies that a processor configured with a custom start time (one day in the
 * past) delivers documents inserted after start-up; scaling bounds 1..3 are
 * exercised as well. Uses the class-level containers and static receivedDocuments.
 *
 * NOTE(review): start() failures are logged but not rethrown in this variant.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
    ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(hostName)
        .handleChanges((List<JsonNode> docs) -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
            for (JsonNode item : docs) {
                processItem(item);
            }
            ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(20))
            .setLeaseAcquireInterval(Duration.ofSeconds(10))
            .setLeaseExpirationInterval(Duration.ofSeconds(30))
            .setFeedPollDelay(Duration.ofSeconds(1))
            .setLeasePrefix("TEST")
            .setMaxItemCount(10)
            // Start reading from one day in the past so fresh inserts are in range.
            .setStartTime(OffsetDateTime.now().minusDays(1))
            .setMinScaleCount(1)
            .setMaxScaleCount(3)
            .setExistingLeasesDiscarded(true)
        )
        .build();
    try {
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
            .subscribe();
    } catch (Exception ex) {
        log.error("Change feed processor did not start in the expected time", ex);
    }
    // Documents are inserted AFTER start-up here, unlike the start-from-beginning test.
    setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
    waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
    assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
    changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    for (CosmosItemProperties item : createdDocuments) {
        assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
    }
    try {
        // Allow the async stop() above to complete.
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
/**
 * Simulates a stale-lease scenario: a first processor runs and stops, its lease
 * documents are then overwritten with a bogus owner ("TEMP_OWNER"), and a second
 * processor is started to verify it steals the stale leases and processes new
 * documents. Uses the class-level containers and static receivedDocuments.
 *
 * NOTE(review): start() failures are logged but not rethrown in this variant.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    // First processor: only logs — it exists to create the initial lease documents.
    ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(ownerFirst)
        .handleChanges(docs -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeasePrefix(leasePrefix)
        )
        .build();
    // Second processor: records received documents; configured with short lease
    // intervals so it can take over the stale leases quickly.
    ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(ownerSecond)
        .handleChanges((List<JsonNode> docs) -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            for (JsonNode item : docs) {
                processItem(item);
            }
            ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(10))
            .setLeaseAcquireInterval(Duration.ofSeconds(5))
            .setLeaseExpirationInterval(Duration.ofSeconds(20))
            .setFeedPollDelay(Duration.ofSeconds(2))
            .setLeasePrefix(leasePrefix)
            .setMaxItemCount(10)
            .setStartFromBeginning(true)
            .setMaxScaleCount(0)
        )
        .build();
    receivedDocuments = new ConcurrentHashMap<>();
    try {
        // Start the first processor, let it run briefly, then stop it so its
        // leases become stale.
        changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .then(Mono.just(changeFeedProcessorFirst)
                .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .flatMap( value -> changeFeedProcessorFirst.stop()
                    .subscribeOn(Schedulers.elastic())
                    .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                ))
            .then(Mono.just(changeFeedProcessorFirst)
                .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
            )
            .doOnSuccess(aVoid -> {
                try {
                    Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
                } catch (InterruptedException e) {
                    log.error(e.getMessage());
                }
                ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                // Find every lease document by its prefix and rewrite its owner
                // to a bogus value, making the leases look stale.
                SqlParameter param = new SqlParameter();
                param.setName("@PartitionLeasePrefix");
                param.setValue(leasePrefix);
                SqlQuerySpec querySpec = new SqlQuerySpec(
                    "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                FeedOptions feedOptions = new FeedOptions();
                createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                    .delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
                    .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                    .flatMap(doc -> {
                        BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
                        CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                        return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                            .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                    })
                    .map(ServiceItemLease::fromDocument)
                    .map(leaseDocument -> {
                        ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                        return leaseDocument;
                    })
                    .last()
                    .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
                    .flatMap(leaseDocument -> {
                        // Insert fresh documents, then start the second processor,
                        // which must acquire the stale leases and deliver them.
                        ChangeFeedProcessorTest.log.info("Start creating documents");
                        List<CosmosItemProperties> docDefList = new ArrayList<>();
                        for(int i = 0; i < FEED_COUNT; i++) {
                            docDefList.add(getDocumentDefinition());
                        }
                        return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                            .last()
                            .delayElement(Duration.ofMillis(1000))
                            .flatMap(cosmosItemResponse -> {
                                ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                    .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                            });
                    })
                    .subscribe();
            })
            .subscribe();
    } catch (Exception ex) {
        log.error("First change feed processor did not start in the expected time", ex);
    }
    waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
    changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    try {
        // Allow the async stop() above to complete.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
safeDeleteCollection(createdFeedCollectionForSplit);
try {
Thread.sleep(500);
} catch (Exception e){ }
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
receivedDocuments = new ConcurrentHashMap<>();
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
The try/catch is needed to avoid compilation errors such as: "Error:(440, 29) java: unreported exception java.lang.InterruptedException; must be caught or declared to be thrown" | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | } catch (InterruptedException e) { | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
// NOTE(review): removed a stray @Test annotation that sat on this method —
// TestNG silently ignores @Test on private methods, and this is a factory for
// the change handler, not a test case.

/**
 * Builds the change-feed handler used by the tests: logs the processing thread,
 * forwards each received document to {@code processItem}, then logs completion.
 *
 * @return consumer invoked by the ChangeFeedProcessor with each batch of changes
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until at least {@code count} documents have been recorded in the static
 * {@code receivedDocuments} map, or the timeout budget elapses.
 *
 * @param timeoutInMillisecond maximum time to wait, consumed in 100 ms slices
 * @param count                number of documents expected
 */
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it, and stop waiting.
            Thread.currentThread().interrupt();
            break;
        }
    }
    // Assert the condition we actually waited for: the previous check
    // (remainingWork >= 0) passed vacuously when the budget expired at exactly 0.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Provisions a fresh feed collection and lease collection before every test
// method; afterMethod() deletes them again.
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
    createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
// One-time setup: build the async client and resolve the shared database used
// by all tests in this class.
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
/**
 * Deletes the per-test collections and waits briefly so the deletes can settle
 * before the next test provisions new collections.
 */
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
    safeDeleteCollection(createdFeedCollection);
    safeDeleteCollection(createdLeaseCollection);
    safeDeleteCollection(createdFeedCollectionForSplit);
    try {
        Thread.sleep(500);
    } catch (InterruptedException e) {
        // Narrowed from an empty catch (Exception): restore the interrupt flag
        // instead of silently discarding it.
        Thread.currentThread().interrupt();
    }
}
// One-time teardown: close the shared async client.
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Resets the received-documents map, bulk-inserts {@code count} generated
 * documents into the given feed collection, and waits for replicas to catch up.
 */
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    receivedDocuments = new ConcurrentHashMap<>();
    List<CosmosItemProperties> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, definitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Bulk-inserts {@code count} freshly generated documents into the feed
 * collection (recorded in {@code createdDocuments}) and waits for replicas to
 * catch up. Unlike setupReadFeedDocuments, does not reset receivedDocuments.
 */
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, definitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Generates a document definition whose id and partition key ("mypk") share a
 * single random UUID, plus a fixed "sgmts" payload.
 */
private CosmosItemProperties getDocumentDefinition() {
    String id = UUID.randomUUID().toString();
    String json = String.format("{ "
        + "\"id\": \"%s\", "
        + "\"mypk\": \"%s\", "
        + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
        + "}"
        , id, id);
    return new CosmosItemProperties(json);
}
// Creates the monitored (feed) collection with the requested throughput, using
// the suite's default collection definition.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
// Creates the lease collection ("leases_<databaseId>", partitioned by /id)
// that the change feed processor uses for lease bookkeeping.
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Records one received change-feed document in {@code receivedDocuments},
 * keyed by its "id" field, after logging its pretty-printed JSON.
 * Synchronized because handler callbacks may arrive on multiple threads.
 */
private static synchronized void processItem(JsonNode item) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
}

class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
// Random host name so concurrent runs do not contend for the same leases.
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Number of documents inserted (and expected back) per test.
private final int FEED_COUNT = 10;
// Base unit (ms) for the polling/timeout arithmetic in these tests.
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory constructor: one test instance per configured client builder.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with startFromBeginning(true) replays
 * documents that were inserted BEFORE the processor started.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Insert the documents before the processor starts.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Give the processor time to drain the feed, then verify it is running.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        // Every inserted document must have been observed by the handler.
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        // Allow the deletes to settle before the next test.
        Thread.sleep(500);
    }
}
/**
 * Verifies that a processor whose start time is set in the past picks up
 * documents inserted AFTER the processor has started.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(1))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                // Start reading from yesterday so everything inserted below is in range.
                .setStartTime(OffsetDateTime.now().minusDays(1))
                .setMinScaleCount(1)
                .setMaxScaleCount(3)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Documents are inserted after the processor has started.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Scenario test: a first processor acquires leases and is stopped; its leases
 * are then stamped with a fake owner ("TEMP_OWNER") to look stale, new
 * documents are inserted, and a second processor is started — which must steal
 * the stale leases and receive all the documents.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // First processor: only claims leases; its handler deliberately does nothing.
        ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerFirst)
            .handleChanges(docs -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix(leasePrefix)
            )
            .build();
        // Second processor: short lease intervals so it takes over the stale leases quickly.
        ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerSecond)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(10))
                .setLeaseAcquireInterval(Duration.ofSeconds(5))
                .setLeaseExpirationInterval(Duration.ofSeconds(20))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix(leasePrefix)
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .build();
        try {
            // Start the first processor, stop it after a delay, then rewrite its
            // leases to a fake owner, insert documents, and start the second one.
            changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessorFirst)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessorFirst.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .doOnSuccess(aVoid -> {
                    try {
                        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("Interrupted exception", e);
                    }
                    ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                    SqlParameter param = new SqlParameter();
                    param.setName("@PartitionLeasePrefix");
                    param.setValue(leasePrefix);
                    SqlQuerySpec querySpec = new SqlQuerySpec(
                        "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                    FeedOptions feedOptions = new FeedOptions();
                    // Stamp every lease with a fake owner so they look stale to the second processor.
                    createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                        .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                        .flatMap(doc -> {
                            ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
                            leaseDocument.setOwner("TEMP_OWNER");
                            CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                            return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                                .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                        })
                        .map(ServiceItemLease::fromDocument)
                        .map(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                            return leaseDocument;
                        })
                        .last()
                        .flatMap(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("Start creating documents");
                            List<CosmosItemProperties> docDefList = new ArrayList<>();
                            for (int i = 0; i < FEED_COUNT; i++) {
                                docDefList.add(getDocumentDefinition());
                            }
                            return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                                .last()
                                .delayElement(Duration.ofMillis(1000))
                                .flatMap(cosmosItemResponse -> {
                                    ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                    return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                                });
                        })
                        .subscribe();
                })
                .subscribe();
        } catch (Exception ex) {
            log.error("First change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Wait for the handover: first processor stopped, second one started.
        long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
        while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
            remainingWork -= 100;
            Thread.sleep(100);
        }
        waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// NOTE(review): removed a stray @Test annotation that sat on this method —
// TestNG silently ignores @Test on private methods, and this is a factory for
// the change handler, not a test case.

/**
 * Builds the change-feed handler used by the tests: logs the processing thread
 * and forwards each received document into {@code receivedDocuments}.
 *
 * @param receivedDocuments map to populate with received documents, keyed by id
 * @return consumer invoked by the ChangeFeedProcessor with each batch of changes
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item, receivedDocuments);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until at least {@code count} documents appear in
 * {@code receivedDocuments} or the timeout budget is exhausted.
 *
 * @param receivedDocuments    map populated by the change feed handler
 * @param timeoutInMillisecond maximum time to wait, consumed in 100 ms slices
 * @param count                number of documents expected
 * @throws InterruptedException if the polling sleep is interrupted
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    // Assert the condition we actually waited for: the previous check
    // (remainingWork >= 0) passed vacuously when the budget expired at exactly 0.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Intentionally empty: tests in this class provision their own collections so
// each test controls its own throughput and cleanup (see the finally blocks).
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
// One-time setup: build the async client and resolve the shared database used
// by all tests in this class.
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
// Intentionally empty: per-test resources are deleted in each test's own
// finally block rather than here.
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
// One-time teardown: close the shared async client.
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Bulk-inserts {@code count} generated documents into the feed collection,
 * records them in {@code createdDocuments}, and waits for replicas to catch up.
 * NOTE(review): {@code receivedDocuments} is not used in this method —
 * presumably kept for signature symmetry with the handler wiring; confirm
 * before removing it.
 */
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Bulk-inserts {@code count} freshly generated documents into the feed
 * collection, appending them to {@code createdDocuments}, then waits for
 * replicas to catch up.
 */
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Generates a document definition whose id and partition key ("mypk") share a
 * single random UUID, plus a fixed "sgmts" payload.
 */
private CosmosItemProperties getDocumentDefinition() {
    String id = UUID.randomUUID().toString();
    String json = String.format("{ "
        + "\"id\": \"%s\", "
        + "\"mypk\": \"%s\", "
        + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
        + "}"
        , id, id);
    return new CosmosItemProperties(json);
}
// Creates the monitored (feed) collection with the requested throughput, using
// the suite's default collection definition.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
// Creates a uniquely named lease collection ("leases_<uuid>", partitioned by
// /id) so concurrent test runs do not share lease state.
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
        "leases_" + UUID.randomUUID(),
        "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Records one received change-feed document in the supplied map, keyed by its
 * "id" field, after logging its pretty-printed JSON. Synchronized because
 * handler callbacks may arrive on multiple threads.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} |
// We need to in this case, in order to avoid compilation errors...
public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}

public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}

class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
// Per-test collections, provisioned in beforeMethod() and deleted in afterMethod().
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
// Shared across tests; populated by processItem() and cleared at the end of each test.
private static Map<String, JsonNode> receivedDocuments;
// Random host name so concurrent runs do not contend for the same leases.
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Number of documents inserted (and expected back) per test.
private final int FEED_COUNT = 10;
// Base unit (ms) for the polling/timeout arithmetic in these tests.
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory constructor: one test instance per configured client builder.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with startFromBeginning(true) replays
 * documents that were inserted before the processor started. Uses the
 * per-test collections provisioned in beforeMethod().
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
    // Insert the documents before the processor starts.
    setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
    changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(hostName)
        .handleChanges(changeFeedProcessorHandler())
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(20))
            .setLeaseAcquireInterval(Duration.ofSeconds(10))
            .setLeaseExpirationInterval(Duration.ofSeconds(30))
            .setFeedPollDelay(Duration.ofSeconds(2))
            .setLeasePrefix("TEST")
            .setMaxItemCount(10)
            .setStartFromBeginning(true)
            .setMaxScaleCount(0)
            .setExistingLeasesDiscarded(true)
        )
        .build();
    try {
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .subscribe();
    } catch (Exception ex) {
        log.error("Change feed processor did not start in the expected time", ex);
    }
    // Give the processor time to drain the feed, then verify it is running.
    try {
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
    changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    // Every inserted document must have been observed by the handler.
    for (CosmosItemProperties item : createdDocuments) {
        assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
    }
    try {
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
/**
 * Verifies that a processor whose start time is set in the past picks up
 * documents inserted after the processor has started.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
    ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(hostName)
        .handleChanges((List<JsonNode> docs) -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
            for (JsonNode item : docs) {
                processItem(item);
            }
            ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(20))
            .setLeaseAcquireInterval(Duration.ofSeconds(10))
            .setLeaseExpirationInterval(Duration.ofSeconds(30))
            .setFeedPollDelay(Duration.ofSeconds(1))
            .setLeasePrefix("TEST")
            .setMaxItemCount(10)
            // Start reading from yesterday so everything inserted below is in range.
            .setStartTime(OffsetDateTime.now().minusDays(1))
            .setMinScaleCount(1)
            .setMaxScaleCount(3)
            .setExistingLeasesDiscarded(true)
        )
        .build();
    try {
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
            .subscribe();
    } catch (Exception ex) {
        log.error("Change feed processor did not start in the expected time", ex);
    }
    // Documents are inserted after the processor has started.
    setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
    waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
    assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
    changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    for (CosmosItemProperties item : createdDocuments) {
        assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
    }
    try {
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
/**
 * Scenario test: a first processor acquires leases and is stopped; its leases
 * are then stamped with a fake owner ("TEMP_OWNER") to look stale, new
 * documents are inserted, and a second processor is started — which must steal
 * the stale leases and receive all the documents.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    // First processor: only claims leases; its handler deliberately does nothing.
    ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(ownerFirst)
        .handleChanges(docs -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeasePrefix(leasePrefix)
        )
        .build();
    // Second processor: short lease intervals so it takes over the stale leases quickly.
    ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(ownerSecond)
        .handleChanges((List<JsonNode> docs) -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            for (JsonNode item : docs) {
                processItem(item);
            }
            ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(10))
            .setLeaseAcquireInterval(Duration.ofSeconds(5))
            .setLeaseExpirationInterval(Duration.ofSeconds(20))
            .setFeedPollDelay(Duration.ofSeconds(2))
            .setLeasePrefix(leasePrefix)
            .setMaxItemCount(10)
            .setStartFromBeginning(true)
            .setMaxScaleCount(0)
        )
        .build();
    receivedDocuments = new ConcurrentHashMap<>();
    try {
        // Start the first processor, stop it after a delay, then rewrite its
        // leases to a fake owner, insert documents, and start the second one.
        changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .then(Mono.just(changeFeedProcessorFirst)
                .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .flatMap( value -> changeFeedProcessorFirst.stop()
                    .subscribeOn(Schedulers.elastic())
                    .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                ))
            .then(Mono.just(changeFeedProcessorFirst)
                .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
            )
            .doOnSuccess(aVoid -> {
                try {
                    Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
                } catch (InterruptedException e) {
                    log.error(e.getMessage());
                }
                ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                SqlParameter param = new SqlParameter();
                param.setName("@PartitionLeasePrefix");
                param.setValue(leasePrefix);
                SqlQuerySpec querySpec = new SqlQuerySpec(
                    "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                FeedOptions feedOptions = new FeedOptions();
                // Stamp every lease with a fake owner so they look stale to the second processor.
                createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                    .delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
                    .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                    .flatMap(doc -> {
                        BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
                        CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                        return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                            .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                    })
                    .map(ServiceItemLease::fromDocument)
                    .map(leaseDocument -> {
                        ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                        return leaseDocument;
                    })
                    .last()
                    .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
                    .flatMap(leaseDocument -> {
                        ChangeFeedProcessorTest.log.info("Start creating documents");
                        List<CosmosItemProperties> docDefList = new ArrayList<>();
                        for(int i = 0; i < FEED_COUNT; i++) {
                            docDefList.add(getDocumentDefinition());
                        }
                        return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                            .last()
                            .delayElement(Duration.ofMillis(1000))
                            .flatMap(cosmosItemResponse -> {
                                ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                    .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                            });
                    })
                    .subscribe();
            })
            .subscribe();
    } catch (Exception ex) {
        log.error("First change feed processor did not start in the expected time", ex);
    }
    // The second processor must end up receiving all inserted documents.
    waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
    changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    try {
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
// NOTE(review): a stray @Test annotation previously decorated this private helper. TestNG does
// not execute private methods (and this one returns a handler rather than running a test), so
// the annotation was dead and misleading; it has been removed.
/**
 * Builds the change-feed handler used by the tests: logs entry/exit of the worker thread and
 * records every received document via processItem.
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until {@code count} documents have been recorded in the shared receivedDocuments map or
 * the timeout elapses, then asserts that all expected documents were actually received.
 *
 * @param timeoutInMillisecond maximum time to wait, in milliseconds
 * @param count                number of documents expected
 */
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            // Re-assert the interrupt so callers further up the stack can observe it.
            Thread.currentThread().interrupt();
            log.error(e.getMessage());
        }
    }
    // Assert on what we actually care about: the received documents. The previous check
    // (remainingWork >= 0) passed spuriously when the timeout expired with remainingWork == 0.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Per-method setup: create a fresh feed and lease collection so tests never share
// change-feed or lease state with each other.
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
    createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
// One-time setup: build the async client from the configured builder and resolve the shared
// database used by all tests in this class.
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
/**
 * Per-method teardown: best-effort deletion of the collections created for the test, then a
 * short pause so the service settles before the next test starts.
 */
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
    safeDeleteCollection(createdFeedCollection);
    safeDeleteCollection(createdLeaseCollection);
    safeDeleteCollection(createdFeedCollectionForSplit);
    try {
        Thread.sleep(500);
    } catch (InterruptedException e) {
        // Thread.sleep only throws InterruptedException; the previous empty catch(Exception)
        // silently swallowed it. Re-assert the interrupt instead of losing it.
        Thread.currentThread().interrupt();
    }
}
// One-time teardown: release the async client created in before_ChangeFeedProcessorTest().
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Resets the receive-tracking map, bulk-inserts {@code count} freshly generated documents into
 * the given feed collection, and waits for replicas to catch up before the test proceeds.
 */
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    receivedDocuments = new ConcurrentHashMap<>();
    List<CosmosItemProperties> documentDefinitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        documentDefinitions.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, documentDefinitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Bulk-inserts {@code count} additional generated documents into the feed collection (without
 * resetting the receive-tracking map) and waits for replicas to catch up.
 */
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> documentDefinitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        documentDefinitions.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, documentDefinitions);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Produces a new document definition whose "id" and "mypk" share one random UUID, plus a fixed
 * "sgmts" payload.
 */
private CosmosItemProperties getDocumentDefinition() {
    String documentId = UUID.randomUUID().toString();
    String json = String.format(
        "{ \"id\": \"%s\", \"mypk\": \"%s\", \"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]}",
        documentId, documentId);
    return new CosmosItemProperties(json);
}
// Creates the feed (source) collection for a test using the default collection definition.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
/**
 * Creates the lease collection used by the change feed processor, partitioned by "/id".
 * <p>
 * The name now carries a random suffix (consistent with the other copy of this helper in the
 * file) instead of the fixed "leases_&lt;database-id&gt;": with a shared database, a fixed name
 * collides when tests create/delete lease collections concurrently.
 */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
        "leases_" + UUID.randomUUID(),
        "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Logs the received change-feed document (pretty-printed) and records it in the shared
 * receivedDocuments map keyed by its "id" field. Synchronized because handler callbacks may
 * arrive on multiple worker threads.
 */
private static synchronized void processItem(JsonNode item) {
    try {
        String prettyJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", prettyJson);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
// Shared database resolved once per class in before_ChangeFeedProcessorTest().
private CosmosAsyncDatabase createdDatabase;
// Random host name so concurrent processor instances in a test run are distinguishable.
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Test sizing/timing knobs (milliseconds for the timeout, RU/s for the throughputs).
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory constructor: one test-class instance per configured client builder.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true) replays documents that
 * were inserted BEFORE it was started. Timing-sensitive: relies on fixed sleeps sized by
 * CHANGE_FEED_PROCESSOR_TIMEOUT rather than explicit completion signals.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Documents are inserted first, before the processor exists.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Give the processor time to acquire leases and drain the feed.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        // Every pre-inserted document must have been observed by the handler.
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Verifies that a processor configured with setStartTime(now - 1 day) picks up documents
 * inserted AFTER it was started. Timing-sensitive; uses waitToReceiveDocuments for completion.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(1))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartTime(OffsetDateTime.now().minusDays(1))
                .setMinScaleCount(1)
                .setMaxScaleCount(3)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Insert after the processor is running; the custom start time must still cover them.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Simulates a stale lease takeover: a first processor runs and is stopped, its leases are then
 * rewritten to a bogus "TEMP_OWNER", documents are inserted, and a second processor is started.
 * The second processor must acquire the stale leases and process all documents.
 * <p>
 * Highly timing-sensitive reactive chain; statement order matters throughout.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // First processor deliberately ignores document payloads; it only exists to create leases.
        ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerFirst)
            .handleChanges(docs -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix(leasePrefix)
            )
            .build();
        ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerSecond)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(10))
                .setLeaseAcquireInterval(Duration.ofSeconds(5))
                .setLeaseExpirationInterval(Duration.ofSeconds(20))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix(leasePrefix)
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .build();
        try {
            // Start the first processor, stop it after a delay, then (in doOnSuccess) steal its
            // leases, insert documents, and start the second processor.
            changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessorFirst)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessorFirst.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .doOnSuccess(aVoid -> {
                    try {
                        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("Interrupted exception", e);
                    }
                    ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                    SqlParameter param = new SqlParameter();
                    param.setName("@PartitionLeasePrefix");
                    param.setValue(leasePrefix);
                    SqlQuerySpec querySpec = new SqlQuerySpec(
                        "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                    FeedOptions feedOptions = new FeedOptions();
                    // Rewrite every lease document's owner so the leases look stale/foreign.
                    createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                        .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                        .flatMap(doc -> {
                            ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
                            leaseDocument.setOwner("TEMP_OWNER");
                            CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                            return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                                .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                        })
                        .map(ServiceItemLease::fromDocument)
                        .map(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                            return leaseDocument;
                        })
                        .last()
                        .flatMap(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("Start creating documents");
                            List<CosmosItemProperties> docDefList = new ArrayList<>();
                            for (int i = 0; i < FEED_COUNT; i++) {
                                docDefList.add(getDocumentDefinition());
                            }
                            return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                                .last()
                                .delayElement(Duration.ofMillis(1000))
                                .flatMap(cosmosItemResponse -> {
                                    ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                    return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                                });
                        })
                        .subscribe();
                })
                .subscribe();
        } catch (Exception ex) {
            log.error("First change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Wait for the handover: first stopped, second started.
        long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
        while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
            remainingWork -= 100;
            Thread.sleep(100);
        }
        waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// NOTE(review): a stray @Test annotation previously decorated this private helper. TestNG does
// not run private methods and this is a handler factory, not a test; the annotation is removed.
/**
 * Builds a change-feed handler that logs entry/exit of the worker thread and records every
 * received document into the supplied map via processItem.
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item, receivedDocuments);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until {@code count} documents are present in {@code receivedDocuments} or the timeout
 * elapses, then asserts that all expected documents were actually received.
 *
 * @param receivedDocuments     map populated by the change-feed handler
 * @param timeoutInMillisecond  maximum time to wait, in milliseconds
 * @param count                 number of documents expected
 * @throws InterruptedException if the polling sleep is interrupted
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    // Assert on the received count itself; the previous (remainingWork >= 0) check passed
    // spuriously when the timeout expired with remainingWork exactly 0.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Per-method setup hook; in this class each test creates its own collections, so nothing to do.
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
// One-time setup: build the async client from the configured builder and resolve the shared
// database used by all tests in this class.
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
// Per-method teardown hook; each test deletes its own collections in a finally block.
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
// One-time teardown: release the async client created in before_ChangeFeedProcessorTest().
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Bulk-inserts {@code count} freshly generated documents into the feed collection, appending
 * them to {@code createdDocuments}, then waits for replicas to catch up.
 * The {@code receivedDocuments} parameter is not read here; it is kept for call-site symmetry.
 */
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Bulk-inserts {@code count} additional generated documents into the feed collection, appending
 * them to {@code createdDocuments}, then waits for replicas to catch up.
 */
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining-- > 0) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Produces a new document definition whose "id" and "mypk" share one random UUID, plus a fixed
 * "sgmts" payload.
 */
private CosmosItemProperties getDocumentDefinition() {
    String documentId = UUID.randomUUID().toString();
    String json = String.format(
        "{ \"id\": \"%s\", \"mypk\": \"%s\", \"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]}",
        documentId, documentId);
    return new CosmosItemProperties(json);
}
// Creates the feed (source) collection for a test using the default collection definition.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
// Creates the lease collection, partitioned by "/id". The random suffix keeps concurrent test
// runs against the shared database from colliding on a fixed collection name.
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
        "leases_" + UUID.randomUUID(),
        "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Logs the received change-feed document (pretty-printed) and records it in the supplied map
 * keyed by its "id" field. Synchronized because handler callbacks may arrive on multiple
 * worker threads.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    try {
        String prettyJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", prettyJson);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} |
why can't we add exception to the test method signature? | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | log.error(e.getMessage()); | public void readFeedDocumentsAfterSplit() throws InterruptedException {
// NOTE(review): the FEED collection for the split test is created via createLeaseCollection(...),
// which builds a "leases_<uuid>" container partitioned by "/id". Given the variable name and the
// FEED_COLLECTION_THROUGHPUT_FOR_SPLIT constant, createFeedCollection(...) looks intended here —
// confirm; likely copy-paste slip.
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
// Shared database and per-method collections created in the lifecycle hooks below.
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
// Static so the static processItem(...) handler can record into it; reset per test.
private static Map<String, JsonNode> receivedDocuments;
// Random host name so concurrent processor instances in a test run are distinguishable.
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Test sizing/timing knobs (milliseconds for the timeout, RU/s for the throughputs).
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory constructor: one test-class instance per configured client builder.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true) replays documents
 * inserted before it was started. Uses the per-method collections created in beforeMethod().
 * NOTE(review): the InterruptedException handlers below only log and do not re-assert the
 * interrupt flag — consider Thread.currentThread().interrupt().
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
    setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
    changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(hostName)
        .handleChanges(changeFeedProcessorHandler())
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(20))
            .setLeaseAcquireInterval(Duration.ofSeconds(10))
            .setLeaseExpirationInterval(Duration.ofSeconds(30))
            .setFeedPollDelay(Duration.ofSeconds(2))
            .setLeasePrefix("TEST")
            .setMaxItemCount(10)
            .setStartFromBeginning(true)
            .setMaxScaleCount(0)
            .setExistingLeasesDiscarded(true)
        )
        .build();
    try {
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .subscribe();
    } catch (Exception ex) {
        log.error("Change feed processor did not start in the expected time", ex);
    }
    try {
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
    changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    for (CosmosItemProperties item : createdDocuments) {
        assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
    }
    try {
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
/**
 * Verifies that a processor configured with setStartTime(now - 1 day) picks up documents
 * inserted after it was started. Uses the per-method collections created in beforeMethod().
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
    // Local variable intentionally shadows the field: this processor is private to this test.
    ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(hostName)
        .handleChanges((List<JsonNode> docs) -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
            for (JsonNode item : docs) {
                processItem(item);
            }
            ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(20))
            .setLeaseAcquireInterval(Duration.ofSeconds(10))
            .setLeaseExpirationInterval(Duration.ofSeconds(30))
            .setFeedPollDelay(Duration.ofSeconds(1))
            .setLeasePrefix("TEST")
            .setMaxItemCount(10)
            .setStartTime(OffsetDateTime.now().minusDays(1))
            .setMinScaleCount(1)
            .setMaxScaleCount(3)
            .setExistingLeasesDiscarded(true)
        )
        .build();
    try {
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
            .subscribe();
    } catch (Exception ex) {
        log.error("Change feed processor did not start in the expected time", ex);
    }
    setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
    waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
    assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
    changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    for (CosmosItemProperties item : createdDocuments) {
        assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
    }
    try {
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
/**
 * Simulates a stale lease takeover: a first processor runs and is stopped, its leases are then
 * rewritten to a bogus "TEMP_OWNER", documents are inserted, and a second processor is started.
 * The second processor must acquire the stale leases and process all documents.
 * Highly timing-sensitive reactive chain; statement order matters throughout.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    // First processor deliberately ignores document payloads; it only exists to create leases.
    ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(ownerFirst)
        .handleChanges(docs -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeasePrefix(leasePrefix)
        )
        .build();
    ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(ownerSecond)
        .handleChanges((List<JsonNode> docs) -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            for (JsonNode item : docs) {
                processItem(item);
            }
            ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(10))
            .setLeaseAcquireInterval(Duration.ofSeconds(5))
            .setLeaseExpirationInterval(Duration.ofSeconds(20))
            .setFeedPollDelay(Duration.ofSeconds(2))
            .setLeasePrefix(leasePrefix)
            .setMaxItemCount(10)
            .setStartFromBeginning(true)
            .setMaxScaleCount(0)
        )
        .build();
    receivedDocuments = new ConcurrentHashMap<>();
    try {
        // Start the first processor, stop it after a delay, then (in doOnSuccess) steal its
        // leases, insert documents, and start the second processor.
        changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .then(Mono.just(changeFeedProcessorFirst)
                .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .flatMap( value -> changeFeedProcessorFirst.stop()
                    .subscribeOn(Schedulers.elastic())
                    .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                ))
            .then(Mono.just(changeFeedProcessorFirst)
                .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
            )
            .doOnSuccess(aVoid -> {
                try {
                    Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
                } catch (InterruptedException e) {
                    log.error(e.getMessage());
                }
                ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                SqlParameter param = new SqlParameter();
                param.setName("@PartitionLeasePrefix");
                param.setValue(leasePrefix);
                SqlQuerySpec querySpec = new SqlQuerySpec(
                    "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                FeedOptions feedOptions = new FeedOptions();
                // Rewrite every lease document's owner so the leases look stale/foreign.
                createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                    .delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
                    .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                    .flatMap(doc -> {
                        BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
                        CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                        return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                            .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                    })
                    .map(ServiceItemLease::fromDocument)
                    .map(leaseDocument -> {
                        ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                        return leaseDocument;
                    })
                    .last()
                    .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
                    .flatMap(leaseDocument -> {
                        ChangeFeedProcessorTest.log.info("Start creating documents");
                        List<CosmosItemProperties> docDefList = new ArrayList<>();
                        for(int i = 0; i < FEED_COUNT; i++) {
                            docDefList.add(getDocumentDefinition());
                        }
                        return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                            .last()
                            .delayElement(Duration.ofMillis(1000))
                            .flatMap(cosmosItemResponse -> {
                                ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                    .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                            });
                    })
                    .subscribe();
            })
            .subscribe();
    } catch (Exception ex) {
        log.error("First change feed processor did not start in the expected time", ex);
    }
    waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
    changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    try {
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
safeDeleteCollection(createdFeedCollectionForSplit);
try {
Thread.sleep(500);
} catch (Exception e){ }
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
receivedDocuments = new ConcurrentHashMap<>();
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
makes sense here. so please use this instead to translate checked to unchecked exception. https://projectreactor.io/docs/core/release/api/reactor/core/Exceptions.html#propagate-java.lang.Throwable- if `InterruptedException` we should error out. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | } catch (InterruptedException e) { | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
safeDeleteCollection(createdFeedCollectionForSplit);
try {
Thread.sleep(500);
} catch (Exception e){ }
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
receivedDocuments = new ConcurrentHashMap<>();
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
Good suggestion, but it goes way beyond what we need to address at this time which is to release a fix for the CFP ASAP. Feel free to open an issue and assign it to me to refactor these tests and remove the handling of the exception. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | } catch (InterruptedException e) { | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
safeDeleteCollection(createdFeedCollectionForSplit);
try {
Thread.sleep(500);
} catch (Exception e){ }
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
receivedDocuments = new ConcurrentHashMap<>();
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
replied on a similar comment below... | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | log.error(e.getMessage()); | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
// NOTE(review): removed a misplaced @Test annotation that sat on this method — this is a
// private factory helper, not a test (TestNG does not run private methods), and the
// dangling timeOut attribute was misleading.
/**
 * Builds the change-feed handler used by the tests: logs the processing thread and
 * records every received document into the static {@code receivedDocuments} map
 * via {@code processItem(JsonNode)}.
 *
 * @return consumer invoked by the change feed processor with each batch of changes
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until the static {@code receivedDocuments} map holds at least {@code count}
 * entries or the timeout budget is exhausted, then asserts that all expected
 * documents actually arrived.
 *
 * @param timeoutInMillisecond maximum time to wait, in milliseconds
 * @param count                number of documents expected in {@code receivedDocuments}
 */
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe the interruption.
            Thread.currentThread().interrupt();
            log.error(e.getMessage());
        }
    }
    // Assert the actual received count: the previous "remainingWork >= 0" check passed
    // spuriously when the budget ran out at exactly 0 with documents still missing.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Provisions a fresh feed (monitored) collection and a fresh lease collection before
// every test so each test starts from an empty change feed and empty lease store.
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
    createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
// Builds one async client for the whole class and resolves the suite-shared database.
// The database is shared (see getSharedCosmosDatabase), which is why afterClass only
// closes the client and never deletes the database.
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
/**
 * Best-effort cleanup after each test: deletes the per-test collections (including the
 * split-test feed collection, which may be null — safeDeleteCollection tolerates that)
 * and pauses briefly so deletions settle before the next test provisions new ones.
 */
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
    safeDeleteCollection(createdFeedCollection);
    safeDeleteCollection(createdLeaseCollection);
    safeDeleteCollection(createdFeedCollectionForSplit);
    try {
        Thread.sleep(500);
    } catch (InterruptedException ignored) {
        // Teardown stays best-effort, but restore the interrupt flag instead of
        // silently swallowing it (the old code caught Exception with an empty block).
        Thread.currentThread().interrupt();
    }
}
// Only the client is closed here; the shared database created outside this class
// outlives it intentionally.
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Resets the shared received-documents map and seeds {@code feedCollection} with
 * {@code count} freshly generated documents, blocking until the bulk insert finishes
 * and replicas have caught up.
 */
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    receivedDocuments = new ConcurrentHashMap<>();
    List<CosmosItemProperties> seedDocs = new ArrayList<>();
    for (long remaining = count; remaining > 0; remaining--) {
        seedDocs.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, seedDocs);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Inserts {@code count} generated documents into {@code feedCollection} and waits for
 * replicas to catch up.
 * NOTE(review): this overwrites {@code createdDocuments} rather than appending, so any
 * earlier inserts are forgotten — confirm that is intended before relying on it.
 */
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> extraDocs = new ArrayList<>();
    for (long remaining = count; remaining > 0; remaining--) {
        extraDocs.add(getDocumentDefinition());
    }
    createdDocuments = bulkInsertBlocking(feedCollection, extraDocs);
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Creates a new test document whose "id" and "mypk" (partition key) share one random
 * UUID; the "sgmts" array is fixed filler payload.
 */
private CosmosItemProperties getDocumentDefinition() {
    String id = UUID.randomUUID().toString();
    String json = "{ "
        + "\"id\": \"" + id + "\", "
        + "\"mypk\": \"" + id + "\", "
        + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
        + "}";
    return new CosmosItemProperties(json);
}
/**
 * Creates the feed (monitored) collection in the shared database using the suite's
 * default collection definition and the given provisioned throughput (RU/s).
 */
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
/**
 * Creates the lease collection (partitioned by "/id") used by the change feed
 * processor to persist ownership/checkpoint state.
 */
// NOTE(review): the name "leases_" + createdDatabase.getId() is constant for the shared
// database, so two concurrently running tests of this class would collide on the same
// lease collection — confirm this class never runs in parallel (another variant of this
// helper uses a random UUID suffix instead).
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Records one change-feed document into the shared {@code receivedDocuments} map,
 * keyed by its "id" field, after pretty-printing it to the log. Synchronized so
 * concurrent handler threads log and record one item at a time.
 */
private static synchronized void processItem(JsonNode item) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    String documentId = item.get("id").asText();
    receivedDocuments.put(documentId, item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase; // suite-shared database, resolved in before_ChangeFeedProcessorTest
private final String hostName = RandomStringUtils.randomAlphabetic(6); // random processor host name per test-class instance
private final int FEED_COUNT = 10; // documents inserted per test run
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000; // base wait unit, in ms; tests scale it by small factors
private final int FEED_COLLECTION_THROUGHPUT = 10100; // RU/s for the monitored collection — presumably above the single-partition limit so it is multi-partition; TODO confirm
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400; // split test starts small, later raises throughput to force a partition split
private final int LEASE_COLLECTION_THROUGHPUT = 400; // RU/s for the lease collection
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG instantiates this class once per client configuration supplied by the
// "clientBuilders" data provider; the builder is handed to the shared TestSuiteBase.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true) replays
 * documents inserted BEFORE it was started. Collections are created and torn down
 * locally so the test is self-contained.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Documents are inserted before the processor starts; startFromBeginning must pick them up.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0) // unlimited scale-out
                .setExistingLeasesDiscarded(true) // ignore leases left over from earlier runs
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Fixed wait for the processor to drain the pre-inserted documents.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        // Every pre-inserted document must have been observed by the handler.
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500); // let deletions settle before the next test
    }
}
/**
 * Verifies that a processor configured with setStartTime(yesterday) receives documents
 * inserted AFTER it was started (the custom start date lies in the past, so everything
 * newer must flow through).
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<CosmosItemProperties> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(1))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartTime(OffsetDateTime.now().minusDays(1)) // read from 24h ago
                .setMinScaleCount(1)
                .setMaxScaleCount(3)
                .setExistingLeasesDiscarded(true)
            )
            .build();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Insert documents only AFTER the processor is up, then wait for them to arrive.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (CosmosItemProperties item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Simulates a stale-lease takeover: a first processor runs and is stopped, its leases
 * are then stamped with a bogus owner ("TEMP_OWNER") directly in the lease collection,
 * and a second processor must still acquire those expired/stale leases and process all
 * newly inserted documents.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // First processor: only claims leases; its handler deliberately records nothing.
        ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerFirst)
            .handleChanges(docs -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix(leasePrefix)
            )
            .build();
        // Second processor: short lease intervals so it takes over the stale leases quickly.
        ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
            .hostName(ownerSecond)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(10))
                .setLeaseAcquireInterval(Duration.ofSeconds(5))
                .setLeaseExpirationInterval(Duration.ofSeconds(20))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix(leasePrefix)
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .build();
        try {
            // Pipeline: start first processor -> let it run -> stop it -> rewrite lease
            // owners -> insert documents -> start second processor.
            changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessorFirst)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessorFirst.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .doOnSuccess(aVoid -> {
                    try {
                        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("Interrupted exception", e);
                    }
                    ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                    SqlParameter param = new SqlParameter();
                    param.setName("@PartitionLeasePrefix");
                    param.setValue(leasePrefix);
                    SqlQuerySpec querySpec = new SqlQuerySpec(
                        "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                    FeedOptions feedOptions = new FeedOptions();
                    // Stamp every lease document with a fake owner to simulate a crashed host.
                    createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                        .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                        .flatMap(doc -> {
                            ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
                            leaseDocument.setOwner("TEMP_OWNER");
                            CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                            return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                                .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                        })
                        .map(ServiceItemLease::fromDocument)
                        .map(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                            return leaseDocument;
                        })
                        .last()
                        .flatMap(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("Start creating documents");
                            List<CosmosItemProperties> docDefList = new ArrayList<>();
                            for (int i = 0; i < FEED_COUNT; i++) {
                                docDefList.add(getDocumentDefinition());
                            }
                            return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                                .last()
                                .delayElement(Duration.ofMillis(1000))
                                .flatMap(cosmosItemResponse -> {
                                    ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                    return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                                });
                        })
                        .subscribe();
                })
                .subscribe();
        } catch (Exception ex) {
            log.error("First change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Busy-wait until ownership transitions from the first to the second processor.
        long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
        while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
            remainingWork -= 100;
            Thread.sleep(100);
        }
        // The second processor must have received every inserted document despite the stale leases.
        waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// NOTE(review): removed a misplaced @Test annotation that sat on this method — this is a
// private factory helper, not a test (TestNG does not run private methods), and the
// dangling timeOut attribute was misleading.
/**
 * Builds a change-feed handler that records every received document into the supplied
 * map via {@code processItem(JsonNode, Map)}.
 *
 * @param receivedDocuments map the handler fills, keyed by document id
 * @return consumer invoked by the change feed processor with each batch of changes
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        for (JsonNode item : docs) {
            processItem(item, receivedDocuments);
        }
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls until {@code receivedDocuments} holds at least {@code count} entries or the
 * timeout budget is exhausted, then asserts that all expected documents arrived.
 *
 * @param receivedDocuments    map filled by the change-feed handler, keyed by document id
 * @param timeoutInMillisecond maximum time to wait, in milliseconds
 * @param count                number of documents expected
 * @throws InterruptedException if the polling sleep is interrupted
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    // Assert the actual received count: the previous "remainingWork >= 0" check passed
    // spuriously when the budget ran out at exactly 0 with documents still missing.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Intentionally empty: in this variant each test provisions its own feed and lease
// collections inline and deletes them in its finally block.
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
// Builds one async client for the whole class and resolves the suite-shared database.
// The database is shared (see getSharedCosmosDatabase), which is why afterClass only
// closes the client and never deletes the database.
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
// Intentionally empty: per-test cleanup happens in each test's finally block, which
// deletes the locally created collections.
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
// Only the client is closed here; the shared database created outside this class
// outlives it intentionally.
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Seeds {@code feedCollection} with {@code count} generated documents, appending the
 * inserted definitions to {@code createdDocuments} and blocking until replicas catch up.
 * NOTE(review): the {@code receivedDocuments} parameter is not read here — presumably
 * kept for signature symmetry with the handler wiring; confirm before removing.
 */
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> seedDocs = new ArrayList<>();
    for (long remaining = count; remaining > 0; remaining--) {
        seedDocs.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, seedDocs));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Inserts {@code count} additional generated documents into {@code feedCollection},
 * appending them to {@code createdDocuments}, and waits for replicas to catch up.
 */
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<CosmosItemProperties> extraDocs = new ArrayList<>();
    for (long remaining = count; remaining > 0; remaining--) {
        extraDocs.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, extraDocs));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Creates a new test document whose "id" and "mypk" (partition key) share one random
 * UUID; the "sgmts" array is fixed filler payload.
 */
private CosmosItemProperties getDocumentDefinition() {
    String id = UUID.randomUUID().toString();
    String json = "{ "
        + "\"id\": \"" + id + "\", "
        + "\"mypk\": \"" + id + "\", "
        + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
        + "}";
    return new CosmosItemProperties(json);
}
/**
 * Creates the feed (monitored) collection in the shared database using the suite's
 * default collection definition and the given provisioned throughput (RU/s).
 */
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
/**
 * Creates a uniquely named lease collection (partitioned by "/id") for the change feed
 * processor's ownership/checkpoint state; the random UUID suffix keeps concurrently
 * running tests from colliding on the same lease store.
 */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
        "leases_" + UUID.randomUUID(),
        "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Records one change-feed document into {@code receivedDocuments}, keyed by its "id"
 * field, after pretty-printing it to the log. Synchronized so concurrent handler
 * threads log and record one item at a time.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    String documentId = item.get("id").asText();
    receivedDocuments.put(documentId, item);
}
} |
@milismsft I have already signed off on v3, and I agree with you: since this is a hotfix for v3, it should go out as soon as possible. IMO, on v4 we should do the right thing (this is not in prod yet) rather than introduce technical debt. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | } catch (InterruptedException e) { | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase; // suite-shared database, resolved in before_ChangeFeedProcessorTest
private CosmosAsyncContainer createdFeedCollection; // monitored collection, recreated per test in beforeMethod
private CosmosAsyncContainer createdFeedCollectionForSplit; // created lazily by the split test only; may stay null
private CosmosAsyncContainer createdLeaseCollection; // lease/checkpoint store, recreated per test
private List<CosmosItemProperties> createdDocuments; // documents inserted by the current test
private static Map<String, JsonNode> receivedDocuments; // documents observed by the handler, keyed by id (static: shared with static processItem)
private final String hostName = RandomStringUtils.randomAlphabetic(6); // random processor host name per test-class instance
private final int FEED_COUNT = 10; // documents inserted per test run
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000; // base wait unit, in ms; tests scale it by small factors
private final int FEED_COLLECTION_THROUGHPUT = 10100; // RU/s for the monitored collection — presumably above the single-partition limit so it is multi-partition; TODO confirm
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400; // split test starts small, later raises throughput to force a partition split
private final int LEASE_COLLECTION_THROUGHPUT = 400; // RU/s for the lease collection
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG instantiates this class once per client configuration supplied by the
// "clientBuilders" data provider; the builder is handed to the shared TestSuiteBase.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Verifies that a processor configured with setStartFromBeginning(true) replays
 * documents inserted BEFORE it was started. Uses the per-test collections created in
 * beforeMethod and the static receivedDocuments map.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
    // Documents are inserted before the processor starts; startFromBeginning must pick them up.
    setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
    changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(hostName)
        .handleChanges(changeFeedProcessorHandler())
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(20))
            .setLeaseAcquireInterval(Duration.ofSeconds(10))
            .setLeaseExpirationInterval(Duration.ofSeconds(30))
            .setFeedPollDelay(Duration.ofSeconds(2))
            .setLeasePrefix("TEST")
            .setMaxItemCount(10)
            .setStartFromBeginning(true)
            .setMaxScaleCount(0) // unlimited scale-out
            .setExistingLeasesDiscarded(true) // ignore leases left over from earlier runs
        )
        .build();
    try {
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .subscribe();
    } catch (Exception ex) {
        // NOTE(review): failure is only logged, not rethrown — the test continues and
        // relies on the isStarted() assertion below to fail; confirm this is intended.
        log.error("Change feed processor did not start in the expected time", ex);
    }
    try {
        // Fixed wait for the processor to drain the pre-inserted documents.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        // NOTE(review): interrupt is swallowed without re-interrupting the thread.
        log.error(e.getMessage());
    }
    assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
    changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    // Every pre-inserted document must have been observed by the handler.
    for (CosmosItemProperties item : createdDocuments) {
        assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
    }
    try {
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
/**
 * Verifies that a processor configured with setStartTime(yesterday) receives documents
 * inserted AFTER it was started (the custom start date lies in the past, so everything
 * newer must flow through).
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
    ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(hostName)
        .handleChanges((List<JsonNode> docs) -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
            for (JsonNode item : docs) {
                processItem(item);
            }
            ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(20))
            .setLeaseAcquireInterval(Duration.ofSeconds(10))
            .setLeaseExpirationInterval(Duration.ofSeconds(30))
            .setFeedPollDelay(Duration.ofSeconds(1))
            .setLeasePrefix("TEST")
            .setMaxItemCount(10)
            .setStartTime(OffsetDateTime.now().minusDays(1)) // read from 24h ago
            .setMinScaleCount(1)
            .setMaxScaleCount(3)
            .setExistingLeasesDiscarded(true)
        )
        .build();
    try {
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
            .subscribe();
    } catch (Exception ex) {
        // NOTE(review): failure is only logged, not rethrown; the assertions below
        // would then fail the test.
        log.error("Change feed processor did not start in the expected time", ex);
    }
    // Insert documents only AFTER the processor is up, then wait for them to arrive.
    setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
    waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
    assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
    changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    for (CosmosItemProperties item : createdDocuments) {
        assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
    }
    try {
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
/**
 * Simulates a stale-lease takeover: a first processor runs and is stopped, its leases
 * are then stamped with a bogus owner ("TEMP_OWNER") directly in the lease collection,
 * and a second processor must still acquire those stale leases and process all newly
 * inserted documents.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    // First processor: only claims leases; its handler deliberately records nothing.
    ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(ownerFirst)
        .handleChanges(docs -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeasePrefix(leasePrefix)
        )
        .build();
    // Second processor: short lease intervals so it takes over the stale leases quickly.
    ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
        .hostName(ownerSecond)
        .handleChanges((List<JsonNode> docs) -> {
            ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            for (JsonNode item : docs) {
                processItem(item);
            }
            ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
        })
        .feedContainer(createdFeedCollection)
        .leaseContainer(createdLeaseCollection)
        .options(new ChangeFeedProcessorOptions()
            .setLeaseRenewInterval(Duration.ofSeconds(10))
            .setLeaseAcquireInterval(Duration.ofSeconds(5))
            .setLeaseExpirationInterval(Duration.ofSeconds(20))
            .setFeedPollDelay(Duration.ofSeconds(2))
            .setLeasePrefix(leasePrefix)
            .setMaxItemCount(10)
            .setStartFromBeginning(true)
            .setMaxScaleCount(0)
        )
        .build();
    receivedDocuments = new ConcurrentHashMap<>();
    try {
        // Pipeline: start first processor -> let it run -> stop it -> rewrite lease
        // owners -> insert documents -> start second processor.
        changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .then(Mono.just(changeFeedProcessorFirst)
                .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .flatMap( value -> changeFeedProcessorFirst.stop()
                    .subscribeOn(Schedulers.elastic())
                    .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                ))
            .then(Mono.just(changeFeedProcessorFirst)
                .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
            )
            .doOnSuccess(aVoid -> {
                try {
                    Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
                } catch (InterruptedException e) {
                    // NOTE(review): interrupt swallowed without re-interrupting.
                    log.error(e.getMessage());
                }
                ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                SqlParameter param = new SqlParameter();
                param.setName("@PartitionLeasePrefix");
                param.setValue(leasePrefix);
                SqlQuerySpec querySpec = new SqlQuerySpec(
                    "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                FeedOptions feedOptions = new FeedOptions();
                // Stamp every lease document with a fake owner to simulate a crashed host.
                createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
                    .delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
                    .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                    .flatMap(doc -> {
                        BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
                        CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                        return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                            .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                    })
                    .map(ServiceItemLease::fromDocument)
                    .map(leaseDocument -> {
                        ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                        return leaseDocument;
                    })
                    .last()
                    .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
                    .flatMap(leaseDocument -> {
                        ChangeFeedProcessorTest.log.info("Start creating documents");
                        List<CosmosItemProperties> docDefList = new ArrayList<>();
                        for(int i = 0; i < FEED_COUNT; i++) {
                            docDefList.add(getDocumentDefinition());
                        }
                        return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                            .last()
                            .delayElement(Duration.ofMillis(1000))
                            .flatMap(cosmosItemResponse -> {
                                ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                    .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                            });
                    })
                    .subscribe();
            })
            .subscribe();
    } catch (Exception ex) {
        log.error("First change feed processor did not start in the expected time", ex);
    }
    // The second processor must have received every inserted document despite the stale leases.
    waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
    changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
    try {
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } catch (InterruptedException e) {
        log.error(e.getMessage());
    }
    receivedDocuments.clear();
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
safeDeleteCollection(createdFeedCollectionForSplit);
try {
Thread.sleep(500);
} catch (Exception e){ }
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
receivedDocuments = new ConcurrentHashMap<>();
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
V4 latest release is currently used by couple customers a preview. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | } catch (InterruptedException e) { | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
// NOTE(review): removed a stray @Test annotation that was attached to this private,
// Consumer-returning helper. TestNG test methods must be public and void, so the
// annotation could never run this as a test and only confused the runner.
/**
 * Builds the change-feed callback used by the tests: every delivered document is
 * recorded through {@link #processItem} so assertions can inspect it later.
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
/**
 * Polls until {@code count} documents have been recorded in the static
 * {@code receivedDocuments} map, or the timeout elapses.
 *
 * @param timeoutInMillisecond maximum time to wait, in milliseconds
 * @param count                number of documents expected
 */
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
// Restore the interrupt flag instead of swallowing it, and stop waiting:
// further sleeps would just throw again immediately.
Thread.currentThread().interrupt();
log.error(e.getMessage());
break;
}
}
// BUG FIX: remainingWork decrements in steps of 100 and the loop exits at exactly 0,
// so the old "remainingWork >= 0" assertion could never fail; assert on the actual
// number of received documents instead.
assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Per-test setup: provision a fresh feed (monitored) and lease collection so every
// test starts from a clean change-feed state. Creation order is feed first, then lease.
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
// One-time suite setup: build the async client and bind to the shared test database.
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
/**
 * Per-test teardown: best-effort deletion of the collections created for the test,
 * followed by a short pause so background deletes settle before the next test.
 */
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
safeDeleteCollection(createdFeedCollectionForSplit);
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// BUG FIX: was an empty "catch (Exception e){ }" — catch only what sleep throws
// and restore the interrupt flag instead of silently swallowing it.
Thread.currentThread().interrupt();
}
}
// One-time suite teardown: release the async client created in before_ChangeFeedProcessorTest.
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
/**
 * Resets the received-documents map and seeds {@code feedCollection} with {@code count}
 * freshly generated documents, blocking until replicas have caught up.
 */
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
// Start every test run with an empty result map.
receivedDocuments = new ConcurrentHashMap<>();
// Delegate the actual seeding to createReadFeedDocuments, which had a duplicate of
// this body — the insert logic now lives in exactly one place.
createReadFeedDocuments(feedCollection, count);
}
// Inserts `count` generated documents into the given feed collection, remembers them
// in `createdDocuments` for later verification, and waits for replicas to catch up.
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> definitions = new ArrayList<>();
for (long remaining = count; remaining > 0; remaining--) {
definitions.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, definitions);
// Give read replicas a chance to catch up before the test starts reading.
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
// Produces a test document whose id and partition key ("mypk") share the same random UUID.
private CosmosItemProperties getDocumentDefinition() {
String id = UUID.randomUUID().toString();
String json = "{ "
+ "\"id\": \"" + id + "\", "
+ "\"mypk\": \"" + id + "\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}";
return new CosmosItemProperties(json);
}
// Creates the feed (monitored) collection with the requested RU/s in the shared database.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
// Creates the "/id"-partitioned lease collection used by the processor for bookkeeping.
// NOTE(review): the name is derived from the shared database id, so two concurrent runs
// against the same database would collide — a later revision in this file switches to a
// random UUID suffix; confirm which behavior is intended.
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
// Logs each delivered change-feed document (pretty-printed, best-effort) and records it
// by id in the static receivedDocuments map so tests can assert on what was delivered.
// synchronized: handlers may run concurrently on processor worker threads.
private static synchronized void processItem(JsonNode item) {
try {
String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
} catch (JsonProcessingException e) {
// Logging failure is non-fatal; the document is still recorded below.
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
// Test logger and a lenient JSON mapper used to pretty-print received documents.
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
// Database shared across the suite; feed/lease containers are created per test.
private CosmosAsyncDatabase createdDatabase;
// Random host name so concurrent runs do not contend for the same leases.
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Number of documents inserted/expected by each scenario.
private final int FEED_COUNT = 10;
// Base unit (ms) for the polling/sleep/timeout arithmetic used throughout.
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
// High RU/s for the feed collection — presumably sized to span multiple partitions; TODO confirm.
private final int FEED_COLLECTION_THROUGHPUT = 10100;
// Minimal RU/s used as the starting point of the partition-split scenario.
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
/**
 * TestNG factory constructor: one test-class instance is created per client
 * configuration supplied by the "clientBuilders" data provider.
 */
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
/**
 * Verifies that documents inserted BEFORE the processor starts are still delivered when
 * the processor is configured with setStartFromBeginning(true). Collections are created
 * locally and cleaned up in the finally block.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
// Seed the feed collection before the processor exists, so delivery can only
// happen via the start-from-beginning path.
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
// Give the processor time to acquire its leases and drain the backlog.
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
// Every seeded document must have been delivered to the handler.
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
/**
 * Verifies that a processor whose start time is set in the past (yesterday) delivers
 * documents inserted after it has started.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
// Start reading from a point well before any document in this test was created.
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
// Documents are inserted AFTER start here, unlike the start-from-beginning test.
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
/**
 * Simulates a crashed host: the first processor starts and is stopped, its leases are
 * overwritten with a fake "TEMP_OWNER", and a second processor must then steal the
 * staled leases and receive all newly inserted documents.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
// First processor: intentionally drops all changes; it exists only to create leases.
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
// Second processor: the one expected to take over the staled leases and record documents.
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
// Start the first processor, let it run briefly, then stop it so its leases go stale.
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
// Forge a foreign owner on every lease so they look abandoned by another host.
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
// Insert documents, then start the second processor, which must steal the leases.
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
// Poll until ownership has transferred (first stopped, second started) or time runs out.
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
// NOTE(review): removed a stray @Test annotation that was attached to this private,
// Consumer-returning helper. TestNG test methods must be public and void, so the
// annotation could never run this as a test and only confused the runner.
/**
 * Builds the change-feed callback: every delivered document is recorded into the
 * supplied map (keyed by id) so the calling test can assert on it afterwards.
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
/**
 * Polls until {@code count} documents appear in {@code receivedDocuments} or the
 * timeout elapses.
 *
 * @param receivedDocuments    map the change-feed handler writes into
 * @param timeoutInMillisecond maximum time to wait, in milliseconds
 * @param count                number of documents expected
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
// BUG FIX: remainingWork decrements in steps of 100 and the loop exits at exactly 0,
// so the old "remainingWork >= 0" assertion could never fail; assert on the actual
// number of received documents instead.
assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
// Intentionally empty: in this revision each test provisions and deletes its own
// collections, so there is no shared per-test setup.
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
// One-time suite setup: build the async client and bind to the shared test database.
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
// Intentionally empty: per-test collections are deleted in each test's finally block.
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
// One-time suite teardown: release the async client created in before_ChangeFeedProcessorTest.
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
/**
 * Seeds {@code feedCollection} with {@code count} generated documents, appending the
 * created items to {@code createdDocuments} and waiting for replicas to catch up.
 *
 * <p>NOTE(review): {@code receivedDocuments} is accepted but never read or written here;
 * it is kept only so existing call sites keep compiling — confirm whether it can be dropped.
 */
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
// Delegate to createReadFeedDocuments, which previously duplicated this body verbatim.
createReadFeedDocuments(createdDocuments, feedCollection, count);
}
// Inserts `count` generated documents into `feedCollection`, appending the created
// items to `createdDocuments`, then waits for read replicas to catch up.
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> definitions = new ArrayList<>();
for (long remaining = count; remaining > 0; remaining--) {
definitions.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
// Produces a test document whose id and partition key ("mypk") share the same random UUID.
private CosmosItemProperties getDocumentDefinition() {
String id = UUID.randomUUID().toString();
String json = "{ "
+ "\"id\": \"" + id + "\", "
+ "\"mypk\": \"" + id + "\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}";
return new CosmosItemProperties(json);
}
// Creates the feed (monitored) collection with the requested RU/s in the shared database.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
// Creates the "/id"-partitioned lease collection used by the processor for bookkeeping.
// The random UUID suffix keeps concurrent test runs from colliding on the container name.
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
// Logs each delivered change-feed document (pretty-printed, best-effort) and records it
// by id in the supplied map so tests can assert on exactly what was delivered.
// synchronized: handlers may run concurrently on processor worker threads.
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
} catch (JsonProcessingException e) {
// Logging failure is non-fatal; the document is still recorded below.
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
I'll deal with the debt later; the changes you're requesting go beyond what this PR/work item should address. IMHO let's keep things simple enough in case we need to review this later and use a different PR to address other things. | public void readFeedDocumentsAfterSplit() {
createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
setupReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
try {
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
FeedOptions feedOptions = new FeedOptions();
feedOptions.setRequestContinuation(null);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap( value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, feedOptions);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if ( count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
} | } catch (InterruptedException e) { | public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.build();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit.readProvisionedThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit.replaceProvisionedThroughput(FEED_COLLECTION_THROUGHPUT).subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
// Test logger and a lenient JSON mapper used to pretty-print received documents.
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
// Database shared across the suite; containers below are recreated per test in beforeMethod.
private CosmosAsyncDatabase createdDatabase;
private CosmosAsyncContainer createdFeedCollection;
private CosmosAsyncContainer createdFeedCollectionForSplit;
private CosmosAsyncContainer createdLeaseCollection;
// Documents inserted by the current test and the documents its handler received.
private List<CosmosItemProperties> createdDocuments;
private static Map<String, JsonNode> receivedDocuments;
// Random host name so concurrent runs do not contend for the same leases.
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Number of documents inserted/expected by each scenario.
private final int FEED_COUNT = 10;
// Base unit (ms) for the polling/sleep/timeout arithmetic used throughout.
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
// High RU/s for the feed collection — presumably sized to span multiple partitions; TODO confirm.
private final int FEED_COLLECTION_THROUGHPUT = 10100;
// Minimal RU/s used as the starting point of the partition-split scenario.
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
/**
 * TestNG factory constructor: one test-class instance is created per client
 * configuration supplied by the "clientBuilders" data provider.
 */
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
/**
 * Verifies that documents inserted BEFORE the processor starts are still delivered when
 * the processor is configured with setStartFromBeginning(true). Uses the collections
 * provisioned in beforeMethod.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() {
// Seed the feed collection before the processor exists, so delivery can only
// happen via the start-from-beginning path.
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler())
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
try {
// Give the processor time to acquire its leases and drain the backlog.
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
// Every seeded document must have been delivered to the handler.
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
/**
 * Verifies that a processor whose start time is set in the past (yesterday) delivers
 * documents inserted after it has started.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() {
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
// Start reading from a point well before any document in this test was created.
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
}
// Documents are inserted AFTER start here, unlike the start-from-beginning test.
setupReadFeedDocuments(createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
/**
 * Simulates a crashed host: the first processor starts and is stopped, its leases are
 * overwritten with a fake "TEMP_OWNER", and a second processor must then steal the
 * staled leases and receive all newly inserted documents.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
// First processor: intentionally drops all changes; it exists only to create leases.
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
// Second processor: the one expected to take over the staled leases and record documents.
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
receivedDocuments = new ConcurrentHashMap<>();
try {
// Start the first processor, let it run briefly, then stop it so its leases go stale.
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap( value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT))
)
.doOnSuccess(aVoid -> {
try {
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
// Forge a foreign owner on every lease so they look abandoned by another host.
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2))
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
// Insert documents, then start the second processor, which must steal the leases.
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
}
waitToReceiveDocuments(40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
receivedDocuments.clear();
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler() {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(long timeoutInMillisecond, long count) {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
log.error(e.getMessage());
}
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
safeDeleteCollection(createdFeedCollectionForSplit);
try {
Thread.sleep(500);
} catch (Exception e){ }
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
receivedDocuments = new ConcurrentHashMap<>();
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments = bulkInsertBlocking(feedCollection, docDefList);
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties("leases_" + createdDatabase.getId(), "/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<CosmosItemProperties> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(OffsetDateTime.now().minusDays(1))
.setMinScaleCount(1)
.setMaxScaleCount(3)
.setExistingLeasesDiscarded(true)
)
.build();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (CosmosItemProperties item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.build();
ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.changeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.build();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
FeedOptions feedOptions = new FeedOptions();
createdLeaseCollection.queryItems(querySpec, feedOptions, CosmosItemProperties.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<CosmosItemProperties> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(List<CosmosItemProperties> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(List<CosmosItemProperties> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<CosmosItemProperties> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private CosmosItemProperties getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
CosmosItemProperties doc = new CosmosItemProperties(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
LeaseToken, ContinuationToken, Owner are Camel case, but timestamp, id, etag are not is that expected? also for Owner, timestamp, ContinuationToken, LeaseToken we probably should define constants. | public void serialize(ServiceItemLease lease, JsonGenerator writer, SerializerProvider serializerProvider) {
try {
writer.writeStartObject();
writer.writeStringField(Constants.Properties.ID, lease.getId());
writer.writeStringField(Constants.Properties.E_TAG, lease.getETag());
writer.writeStringField("LeaseToken", lease.getLeaseToken());
writer.writeStringField("ContinuationToken", lease.getContinuationToken());
writer.writeStringField("timestamp", lease.getTimestamp());
writer.writeStringField("Owner", lease.getOwner());
writer.writeEndObject();
} catch (IOException e) {
throw new IllegalStateException(e);
}
} | writer.writeStringField("Owner", lease.getOwner()); | public void serialize(ServiceItemLease lease, JsonGenerator writer, SerializerProvider serializerProvider) {
try {
writer.writeStartObject();
writer.writeStringField(Constants.Properties.ID, lease.getId());
writer.writeStringField(Constants.Properties.E_TAG, lease.getETag());
writer.writeStringField(PROPERTY_NAME_LEASE_TOKEN, lease.getLeaseToken());
writer.writeStringField(PROPERTY_NAME_CONTINUATION_TOKEN, lease.getContinuationToken());
writer.writeStringField(PROPERTY_NAME_TIMESTAMP, lease.getTimestamp());
writer.writeStringField(PROPERTY_NAME_OWNER, lease.getOwner());
writer.writeEndObject();
} catch (IOException e) {
throw new IllegalStateException(e);
}
} | class members
private static final long serialVersionUID = 1L;
protected ServiceItemLeaseJsonSerializer() { this(null); } | class members
private static final long serialVersionUID = 1L;
protected ServiceItemLeaseJsonSerializer() { this(null); } |
The current naming convention follows .Net and the content of a lease document as written from that code. The idea behind this design is that you can have CFP instances resuming work from both platforms, Java and .NET. The ServiceItemLease is internal only (not exposed to the user); for the easy of debugging keeping the same names for members and the content of the lease document as written in Cosmos is actually beneficial. | public void serialize(ServiceItemLease lease, JsonGenerator writer, SerializerProvider serializerProvider) {
try {
writer.writeStartObject();
writer.writeStringField(Constants.Properties.ID, lease.getId());
writer.writeStringField(Constants.Properties.E_TAG, lease.getETag());
writer.writeStringField("LeaseToken", lease.getLeaseToken());
writer.writeStringField("ContinuationToken", lease.getContinuationToken());
writer.writeStringField("timestamp", lease.getTimestamp());
writer.writeStringField("Owner", lease.getOwner());
writer.writeEndObject();
} catch (IOException e) {
throw new IllegalStateException(e);
}
} | writer.writeStringField("Owner", lease.getOwner()); | public void serialize(ServiceItemLease lease, JsonGenerator writer, SerializerProvider serializerProvider) {
try {
writer.writeStartObject();
writer.writeStringField(Constants.Properties.ID, lease.getId());
writer.writeStringField(Constants.Properties.E_TAG, lease.getETag());
writer.writeStringField(PROPERTY_NAME_LEASE_TOKEN, lease.getLeaseToken());
writer.writeStringField(PROPERTY_NAME_CONTINUATION_TOKEN, lease.getContinuationToken());
writer.writeStringField(PROPERTY_NAME_TIMESTAMP, lease.getTimestamp());
writer.writeStringField(PROPERTY_NAME_OWNER, lease.getOwner());
writer.writeEndObject();
} catch (IOException e) {
throw new IllegalStateException(e);
}
} | class members
private static final long serialVersionUID = 1L;
protected ServiceItemLeaseJsonSerializer() { this(null); } | class members
private static final long serialVersionUID = 1L;
protected ServiceItemLeaseJsonSerializer() { this(null); } |
May be explaining here that `To disable auto-renew, user have to set MaxAutoRenewDuration to zero.` And default is true | public Flux<ServiceBusReceivedMessage> receive(ReceiveAsyncOptions options) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
if (Objects.isNull(options)) {
return fluxError(logger, new NullPointerException("'options' cannot be null"));
} else if (options.getMaxAutoRenewDuration() != null && options.getMaxAutoRenewDuration().isNegative()) {
return fluxError(logger, new IllegalArgumentException("'maxAutoRenewDuration' cannot be negative."));
}
if (receiveMode != ReceiveMode.PEEK_LOCK && options.isEnableAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Auto-complete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath, options)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
} | return fluxError(logger, new IllegalArgumentException("'maxAutoRenewDuration' cannot be negative.")); | public Flux<ServiceBusReceivedMessage> receive(ReceiveAsyncOptions options) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
if (Objects.isNull(options)) {
return fluxError(logger, new NullPointerException("'options' cannot be null"));
} else if (options.getMaxAutoRenewDuration() != null && options.getMaxAutoRenewDuration().isNegative()) {
return fluxError(logger, new IllegalArgumentException("'maxAutoRenewDuration' cannot be negative."));
}
if (receiveMode != ReceiveMode.PEEK_LOCK && options.isEnableAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Auto-complete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath, options)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiverOptions receiverOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
private final ReceiveAsyncOptions defaultReceiveOptions;
private final Runnable onClientClose;
/**
* Map containing linkNames and their associated consumers. Key: linkName Value: consumer associated with that
* linkName.
*/
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
* Creates a receiver that listens to a Service Bus resource.
*
* @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
* @param entityPath The name of the topic or queue.
* @param entityType The type of the Service Bus resource.
* @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
* @param receiverOptions Options when receiving messages.
* @param connectionProcessor The AMQP connection to the Service Bus resource.
* @param tracerProvider Tracer for telemetry.
* @param messageSerializer Serializes and deserializes Service Bus messages.
* @param messageLockContainer Container for message locks.
* @param onClientClose Operation to run when the client completes.
*/
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiverOptions receiverOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.prefetch = receiverOptions.getPrefetchCount();
this.receiveMode = receiverOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
this.defaultReceiveOptions = new ReceiveAsyncOptions()
.setEnableAutoComplete(true)
.setMaxAutoRenewDuration(connectionProcessor.getRetryOptions().getTryTimeout());
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return entityPath;
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken) {
return abandon(lockToken, null);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> complete(MessageLockToken lockToken) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken) {
return defer(lockToken, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message properties. This
 * will move the message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue using default dead-letter options.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/service-bus-dead-letter-queues">Dead-letter
 * queues</a>
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with a deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving the message to the deadletter sub-queue.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
// deadLetterOptions is validated here; lockToken is validated inside updateDisposition.
if (Objects.isNull(deadLetterOptions)) {
return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call
 * to {@code peek()} fetches the first active message for this receiver; each subsequent call fetches the
 * subsequent message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        final String errorMessage = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek");
        return monoError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(managementNode -> managementNode.peek());
}
/**
 * Starting from the given sequence number, reads the next active message without changing the state of the
 * receiver or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        final String errorMessage = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt");
        return monoError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(managementNode -> managementNode.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        final String errorMessage = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch");
        return fluxError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(managementNode -> managementNode.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        final String errorMessage = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt");
        return fluxError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(managementNode -> managementNode.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity using the default
 * receive options (which enable auto-completion of successfully processed messages).
 *
 * @return A stream of messages from the Service Bus entity.
 */
public Flux<ServiceBusReceivedMessage> receive() {
return receive(defaultReceiveOptions);
}
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
 * their sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 *
 * @return A deferred message with the matching {@code sequenceNumber}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
    // Guard against use after close(), for consistency with the other receive/peek operations
    // (e.g. receiveDeferredMessageBatch, peek) which all perform this check.
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using their sequence numbers.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        final String errorMessage =
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch");
        return fluxError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(managementNode ->
            managementNode.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
 * on the entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message is locked on the
 * server for this receiver instance for a duration as specified during queue creation (LockDuration). If
 * processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal,
 * the lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is empty or not a valid UUID.
 */
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(lockToken.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (lockToken.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    final UUID lockTokenUuid;
    try {
        lockTokenUuid = UUID.fromString(lockToken.getLockToken());
    } catch (IllegalArgumentException ex) {
        // BUG FIX: the original invoked monoError(...) but discarded its return value, then continued
        // with a null UUID. Return the error Mono so the malformed token surfaces to the caller.
        return monoError(logger, ex);
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(serviceBusManagementNode -> serviceBusManagementNode.renewMessageLock(lockTokenUuid))
        .map(instant -> {
            // Keep the received message's view of the lock expiration in sync with the service.
            if (lockToken instanceof ServiceBusReceivedMessage) {
                ((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
            }
            return instant;
        });
}
/**
 * Disposes of the consumer by closing the underlying connection to the service. Safe to call more than once;
 * subsequent calls are no-ops.
 */
@Override
public void close() {
    // getAndSet makes the close idempotent: only the first caller performs the teardown.
    if (isDisposed.getAndSet(true)) {
        return;
    }

    logger.info("Removing receiver links.");
    // Iterate values() directly instead of keySet() + get(): avoids a second map lookup and the
    // null check (ConcurrentHashMap never holds null values).
    openConsumers.values().forEach(ServiceBusAsyncConsumer::close);
    openConsumers.clear();

    onClientClose.run();
}
/**
 * Checks whether this receiver owns an unexpired lock for {@code lockToken}.
 * Emits {@code false} when the token is unknown to this receiver, {@code true} when the lock is still held,
 * and errors with an {@link AmqpException} when the lock has already expired.
 */
private Mono<Boolean> isLockTokenValid(String lockToken) {
    final Instant lockedUntilUtc = messageLockContainer.getLockTokenExpiration(lockToken);
    if (lockedUntilUtc == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }

    final Instant now = Instant.now();
    if (!lockedUntilUtc.isBefore(now)) {
        return Mono.just(true);
    }

    final String errorMessage = String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", lockedUntilUtc, now);
    return Mono.error(logger.logExceptionAsError(new AmqpException(false, errorMessage, getErrorContext())));
}
/**
 * Settles a message (complete/abandon/defer/dead-letter) via the management node using its lock token.
 *
 * @param message The lock-token holder of the message to settle.
 * @param dispositionStatus The settlement operation to perform.
 * @param deadLetterReason Optional dead-letter reason (SUSPENDED only).
 * @param deadLetterErrorDescription Optional dead-letter error description (SUSPENDED only).
 * @param propertiesToModify Optional message properties to modify.
 *
 * @return A {@link Mono} that completes when the disposition has been applied and the lock removed.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {

    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    // Settlement requires PEEK_LOCK; in RECEIVE_AND_DELETE the message is already removed.
    // (The original repeated the lockToken null/empty checks here verbatim; they were removed
    // because the guards above have already validated the token.)
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }

    final String lockToken = message.getLockToken();
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);

    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        // Drop the local lock tracking once the service has settled the message.
        messageLockContainer.remove(lockToken);
    }));
}
/**
 * Returns the consumer registered under {@code linkName}, creating (and caching) it atomically if absent.
 * The created consumer subscribes to a self-repeating receive-link publisher so that a new AMQP link is
 * requested whenever the previous one terminates.
 *
 * @param linkName Name of the AMQP link; used as the cache key in {@code openConsumers}.
 * @param options Receive options controlling auto-complete and lock auto-renewal for the consumer.
 */
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName, ReceiveAsyncOptions options) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
// .repeat() re-subscribes after the link completes, recreating the AMQP receive link.
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
.repeat();
final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, linkName, null);
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor, context));
// Auto lock renewal is on only when a non-null, non-zero max renew duration was configured.
final boolean isAutoLockRenewal = options.getMaxAutoRenewDuration() != null
&& !options.getMaxAutoRenewDuration().isZero();
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
options.isEnableAutoComplete(), isAutoLockRenewal, options.getMaxAutoRenewDuration(),
connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
/**
 * Builds an AMQP error context describing this receiver's namespace and entity, used when raising AMQP errors.
 */
private AmqpErrorContext getErrorContext() {
return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Options used by deadLetter(MessageLockToken) when the caller supplies none.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
// Set to true once close() runs; guards every public operation.
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiverOptions receiverOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
// Tracks lock-token expirations for messages received in PEEK_LOCK mode.
private final MessageLockContainer messageLockContainer;
// Defaults used by receive(); built in the constructor from the retry options.
private final ReceiveAsyncOptions defaultReceiveOptions;
// Invoked from close() so the owning builder/client can release shared resources.
private final Runnable onClientClose;
/**
 * Map containing linkNames and their associated consumers. Key: linkName. Value: consumer associated with that
 * linkName.
 */
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiverOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client completes.
 * @throws NullPointerException if any of the required arguments is null.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiverOptions receiverOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.prefetch = receiverOptions.getPrefetchCount();
this.receiveMode = receiverOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
// Defaults for receive(): auto-complete on, lock auto-renewal capped by the retry try-timeout.
this.defaultReceiveOptions = new ReceiveAsyncOptions()
.setEnableAutoComplete(true)
.setMaxAutoRenewDuration(connectionProcessor.getRetryOptions().getTryTimeout());
}
/**
 * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar
 * to {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace that the connection is associated with.
 */
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
 * Gets the Service Bus resource (queue or topic path) this client interacts with.
 *
 * @return The Service Bus resource this client interacts with.
 */
public String getEntityPath() {
return entityPath;
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} using its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
return abandon(lockToken, null);
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} using its lock token and updates the message's properties.
 * This will make the message available again for processing. Abandoning a message will increase the delivery
 * count on the message.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from
 * the service.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move the message into the
 * deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken) {
return defer(lockToken, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message properties. This
 * will move the message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue using default dead-letter options.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/service-bus-dead-letter-queues">Dead-letter
 * queues</a>
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with a deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving the message to the deadletter sub-queue.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
// deadLetterOptions is validated here; lockToken is validated inside updateDisposition.
if (Objects.isNull(deadLetterOptions)) {
return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call
 * to {@code peek()} fetches the first active message for this receiver; each subsequent call fetches the
 * subsequent message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        final String errorMessage = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek");
        return monoError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(managementNode -> managementNode.peek());
}
/**
 * Starting from the given sequence number, reads the next active message without changing the state of the
 * receiver or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        final String errorMessage = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt");
        return monoError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(managementNode -> managementNode.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        final String errorMessage = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch");
        return fluxError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(managementNode -> managementNode.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        final String errorMessage = String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt");
        return fluxError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(managementNode -> managementNode.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity using the default
 * receive options.
 *
 * <p>
 * With the defaults (built in the constructor), each successfully consumed message is auto-completed and the
 * message lock is auto-renewed up to the retry options' try-timeout.
 * </p>
 *
 * @return A stream of messages from the Service Bus entity.
 */
public Flux<ServiceBusReceivedMessage> receive() {
return receive(defaultReceiveOptions);
}
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
 * their sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 *
 * @return A deferred message with the matching {@code sequenceNumber}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
    // Guard against use after close(), for consistency with the other receive/peek operations
    // (e.g. receiveDeferredMessageBatch, peek) which all perform this check.
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber));
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using their sequence numbers.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        final String errorMessage =
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch");
        return fluxError(logger, new IllegalStateException(errorMessage));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(managementNode ->
            managementNode.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
 * on the entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message is locked on the
 * server for this receiver instance for a duration as specified during queue creation (LockDuration). If
 * processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal,
 * the lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is empty or not a valid UUID.
 */
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(lockToken.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (lockToken.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    final UUID lockTokenUuid;
    try {
        lockTokenUuid = UUID.fromString(lockToken.getLockToken());
    } catch (IllegalArgumentException ex) {
        // BUG FIX: the original invoked monoError(...) but discarded its return value, then continued
        // with a null UUID. Return the error Mono so the malformed token surfaces to the caller.
        return monoError(logger, ex);
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(serviceBusManagementNode -> serviceBusManagementNode.renewMessageLock(lockTokenUuid))
        .map(instant -> {
            // Keep the received message's view of the lock expiration in sync with the service.
            if (lockToken instanceof ServiceBusReceivedMessage) {
                ((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
            }
            return instant;
        });
}
/**
 * Disposes of the consumer by closing the underlying connection to the service. Safe to call more than once;
 * subsequent calls are no-ops.
 */
@Override
public void close() {
    // getAndSet makes the close idempotent: only the first caller performs the teardown.
    if (isDisposed.getAndSet(true)) {
        return;
    }

    logger.info("Removing receiver links.");
    // Iterate values() directly instead of keySet() + get(): avoids a second map lookup and the
    // null check (ConcurrentHashMap never holds null values).
    openConsumers.values().forEach(ServiceBusAsyncConsumer::close);
    openConsumers.clear();

    onClientClose.run();
}
/**
 * Checks whether this receiver owns an unexpired lock for {@code lockToken}.
 * Emits {@code false} when the token is unknown to this receiver, {@code true} when the lock is still held,
 * and errors with an {@link AmqpException} when the lock has already expired.
 */
private Mono<Boolean> isLockTokenValid(String lockToken) {
    final Instant lockedUntilUtc = messageLockContainer.getLockTokenExpiration(lockToken);
    if (lockedUntilUtc == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }

    final Instant now = Instant.now();
    if (!lockedUntilUtc.isBefore(now)) {
        return Mono.just(true);
    }

    final String errorMessage = String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", lockedUntilUtc, now);
    return Mono.error(logger.logExceptionAsError(new AmqpException(false, errorMessage, getErrorContext())));
}
/**
 * Updates the disposition (complete, abandon, defer, dead-letter) of a locked message via the
 * management node, then releases the local lock entry on success.
 *
 * @param message Lock token of the message whose disposition is updated.
 * @param dispositionStatus New disposition to apply.
 * @param deadLetterReason Reason used when dead-lettering; may be {@code null} otherwise.
 * @param deadLetterErrorDescription Description used when dead-lettering; may be {@code null}.
 * @param propertiesToModify Properties to modify on the message; may be {@code null}.
 * @return {@code Mono} completing when the service acknowledges the disposition change.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {

    // Argument validation. The original code repeated the lockToken null/empty checks a second
    // time after the receive-mode check; those duplicates were unreachable and are removed.
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    // Dispositions require the message to be locked, which only PEEK_LOCK provides.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }

    final String lockToken = message.getLockToken();
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);

    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        // The service accepted the disposition, so the local lock entry is no longer needed.
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        messageLockContainer.remove(lockToken);
    }));
}
/**
 * Returns the consumer for {@code linkName}, creating and caching one if none exists yet.
 * The created consumer wraps a self-repairing (repeat()) receive-link pipeline.
 */
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName, ReceiveAsyncOptions options) {
    // computeIfAbsent guarantees a single consumer per link name.
    return openConsumers.computeIfAbsent(linkName, name -> {
        logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
        // Receive-link source: re-created on completion via repeat() so a dropped link is replaced.
        final Flux<AmqpReceiveLink> receiveLink =
            connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
                receiveMode, isSessionEnabled, null, entityType))
            .doOnNext(next -> {
                final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
                    + " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
                logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
                    entityType);
            })
            .repeat();
        final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, linkName, null);
        final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
        // Bridges the link Flux into a message processor with prefetch and retry behavior.
        final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
            new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor, context));
        // Auto lock renewal is enabled only for a non-null, non-zero max renew duration.
        final boolean isAutoLockRenewal = options.getMaxAutoRenewDuration() != null
            && !options.getMaxAutoRenewDuration().isZero();
        return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
            options.isEnableAutoComplete(), isAutoLockRenewal, options.getMaxAutoRenewDuration(),
            connectionProcessor.getRetryOptions(), messageLockContainer,
            this::complete, this::abandon, this::renewMessageLock);
    });
}
/** Builds the session-scoped error context identifying this receiver's namespace and entity. */
private AmqpErrorContext getErrorContext() {
    return new SessionErrorContext(
        getFullyQualifiedNamespace(),
        getEntityPath());
}
} |
What happens if they set Duration.ZERO? Would we try the window timeout approach? | void startPartitionPump(PartitionOwnership claimedOwnership, Checkpoint checkpoint) {
if (partitionPumps.containsKey(claimedOwnership.getPartitionId())) {
logger.verbose("Consumer is already running for this partition {}", claimedOwnership.getPartitionId());
return;
}
try {
PartitionContext partitionContext = new PartitionContext(claimedOwnership.getFullyQualifiedNamespace(),
claimedOwnership.getEventHubName(), claimedOwnership.getConsumerGroup(),
claimedOwnership.getPartitionId());
PartitionProcessor partitionProcessor = this.partitionProcessorFactory.get();
InitializationContext initializationContext = new InitializationContext(partitionContext);
partitionProcessor.initialize(initializationContext);
EventPosition startFromEventPosition = null;
if (checkpoint != null && checkpoint.getOffset() != null) {
startFromEventPosition = EventPosition.fromOffset(checkpoint.getOffset());
} else if (checkpoint != null && checkpoint.getSequenceNumber() != null) {
startFromEventPosition = EventPosition.fromSequenceNumber(checkpoint.getSequenceNumber());
} else if (initialPartitionEventPosition.containsKey(claimedOwnership.getPartitionId())) {
startFromEventPosition = initialPartitionEventPosition.get(claimedOwnership.getPartitionId());
} else {
startFromEventPosition = EventPosition.latest();
}
logger.info("Starting event processing from {} for partition {}", startFromEventPosition,
claimedOwnership.getPartitionId());
ReceiveOptions receiveOptions = new ReceiveOptions().setOwnerLevel(0L)
.setTrackLastEnqueuedEventProperties(trackLastEnqueuedEventProperties);
EventHubConsumerAsyncClient eventHubConsumer = eventHubClientBuilder.buildAsyncClient()
.createConsumer(claimedOwnership.getConsumerGroup(), EventHubClientBuilder.DEFAULT_PREFETCH_COUNT);
partitionPumps.put(claimedOwnership.getPartitionId(), eventHubConsumer);
if (maxWaitTime != null) {
eventHubConsumer
.receiveFromPartition(claimedOwnership.getPartitionId(), startFromEventPosition, receiveOptions)
.bufferTimeout(maxBatchSize, maxWaitTime)
.subscribe(partitionEventBatch -> {
processEventBatch(partitionContext, partitionProcessor,
eventHubConsumer, partitionEventBatch);
},
/* EventHubConsumer receive() returned an error */
ex -> handleError(claimedOwnership, eventHubConsumer, partitionProcessor, ex, partitionContext),
() -> {
partitionProcessor.close(new CloseContext(partitionContext,
CloseReason.EVENT_PROCESSOR_SHUTDOWN));
cleanup(claimedOwnership, eventHubConsumer);
});
} else {
eventHubConsumer
.receiveFromPartition(claimedOwnership.getPartitionId(), startFromEventPosition, receiveOptions)
.buffer(maxBatchSize)
.subscribe(partitionEventBatch -> {
processEventBatch(partitionContext, partitionProcessor,
eventHubConsumer, partitionEventBatch);
},
/* EventHubConsumer receive() returned an error */
ex -> handleError(claimedOwnership, eventHubConsumer, partitionProcessor, ex, partitionContext),
() -> {
partitionProcessor.close(new CloseContext(partitionContext,
CloseReason.EVENT_PROCESSOR_SHUTDOWN));
cleanup(claimedOwnership, eventHubConsumer);
});
}
} catch (Exception ex) {
if (partitionPumps.containsKey(claimedOwnership.getPartitionId())) {
cleanup(claimedOwnership, partitionPumps.get(claimedOwnership.getPartitionId()));
}
throw logger.logExceptionAsError(
new PartitionProcessorException(
"Error occurred while starting partition pump for partition " + claimedOwnership.getPartitionId(),
ex));
}
} | if (maxWaitTime != null) { | void startPartitionPump(PartitionOwnership claimedOwnership, Checkpoint checkpoint) {
if (partitionPumps.containsKey(claimedOwnership.getPartitionId())) {
logger.verbose("Consumer is already running for this partition {}", claimedOwnership.getPartitionId());
return;
}
try {
PartitionContext partitionContext = new PartitionContext(claimedOwnership.getFullyQualifiedNamespace(),
claimedOwnership.getEventHubName(), claimedOwnership.getConsumerGroup(),
claimedOwnership.getPartitionId());
PartitionProcessor partitionProcessor = this.partitionProcessorFactory.get();
InitializationContext initializationContext = new InitializationContext(partitionContext);
partitionProcessor.initialize(initializationContext);
EventPosition startFromEventPosition = null;
if (checkpoint != null && checkpoint.getOffset() != null) {
startFromEventPosition = EventPosition.fromOffset(checkpoint.getOffset());
} else if (checkpoint != null && checkpoint.getSequenceNumber() != null) {
startFromEventPosition = EventPosition.fromSequenceNumber(checkpoint.getSequenceNumber());
} else if (initialPartitionEventPosition.containsKey(claimedOwnership.getPartitionId())) {
startFromEventPosition = initialPartitionEventPosition.get(claimedOwnership.getPartitionId());
} else {
startFromEventPosition = EventPosition.latest();
}
logger.info("Starting event processing from {} for partition {}", startFromEventPosition,
claimedOwnership.getPartitionId());
ReceiveOptions receiveOptions = new ReceiveOptions().setOwnerLevel(0L)
.setTrackLastEnqueuedEventProperties(trackLastEnqueuedEventProperties);
EventHubConsumerAsyncClient eventHubConsumer = eventHubClientBuilder.buildAsyncClient()
.createConsumer(claimedOwnership.getConsumerGroup(), EventHubClientBuilder.DEFAULT_PREFETCH_COUNT);
partitionPumps.put(claimedOwnership.getPartitionId(), eventHubConsumer);
if (maxWaitTime != null) {
eventHubConsumer
.receiveFromPartition(claimedOwnership.getPartitionId(), startFromEventPosition, receiveOptions)
.windowTimeout(maxBatchSize, maxWaitTime)
.flatMap(Flux::collectList)
.subscribe(partitionEventBatch -> processEventBatch(partitionContext, partitionProcessor,
eventHubConsumer, partitionEventBatch),
/* EventHubConsumer receive() returned an error */
ex -> handleError(claimedOwnership, eventHubConsumer, partitionProcessor, ex, partitionContext),
() -> {
partitionProcessor.close(new CloseContext(partitionContext,
CloseReason.EVENT_PROCESSOR_SHUTDOWN));
cleanup(claimedOwnership, eventHubConsumer);
});
} else {
eventHubConsumer
.receiveFromPartition(claimedOwnership.getPartitionId(), startFromEventPosition, receiveOptions)
.window(maxBatchSize)
.flatMap(Flux::collectList)
.subscribe(partitionEventBatch -> {
processEventBatch(partitionContext, partitionProcessor,
eventHubConsumer, partitionEventBatch);
},
/* EventHubConsumer receive() returned an error */
ex -> handleError(claimedOwnership, eventHubConsumer, partitionProcessor, ex, partitionContext),
() -> {
partitionProcessor.close(new CloseContext(partitionContext,
CloseReason.EVENT_PROCESSOR_SHUTDOWN));
cleanup(claimedOwnership, eventHubConsumer);
});
}
} catch (Exception ex) {
if (partitionPumps.containsKey(claimedOwnership.getPartitionId())) {
cleanup(claimedOwnership, partitionPumps.get(claimedOwnership.getPartitionId()));
}
throw logger.logExceptionAsError(
new PartitionProcessorException(
"Error occurred while starting partition pump for partition " + claimedOwnership.getPartitionId(),
ex));
}
} | class PartitionPumpManager {
private final ClientLogger logger = new ClientLogger(PartitionPumpManager.class);
private final CheckpointStore checkpointStore;
private final Map<String, EventHubConsumerAsyncClient> partitionPumps = new ConcurrentHashMap<>();
private final Supplier<PartitionProcessor> partitionProcessorFactory;
private final EventHubClientBuilder eventHubClientBuilder;
private final TracerProvider tracerProvider;
private final boolean trackLastEnqueuedEventProperties;
private final Map<String, EventPosition> initialPartitionEventPosition;
private final Duration maxWaitTime;
private final int maxBatchSize;
private final boolean batchReceiveMode;
/**
* Creates an instance of partition pump manager.
*
* @param checkpointStore The partition manager that is used to store and update checkpoints.
* @param partitionProcessorFactory The partition processor factory that is used to create new instances of {@link
* PartitionProcessor} when new partition pumps are started.
* @param eventHubClientBuilder The client builder used to create new clients (and new connections) for each
* partition processed by this {@link EventProcessorClient}.
* @param trackLastEnqueuedEventProperties If set to {@code true}, all events received by this EventProcessorClient
* will also include the last enqueued event properties for it's respective partitions.
* @param tracerProvider The tracer implementation.
* @param initialPartitionEventPosition Map of initial event positions for partition ids.
* @param maxBatchSize The maximum batch size to receive per users' process handler invocation.
* @param maxWaitTime The maximum time to wait to receive a batch or a single event.
* @param batchReceiveMode The boolean value indicating if this processor is configured to receive in batches or
* single events.
*/
PartitionPumpManager(CheckpointStore checkpointStore,
    Supplier<PartitionProcessor> partitionProcessorFactory, EventHubClientBuilder eventHubClientBuilder,
    boolean trackLastEnqueuedEventProperties, TracerProvider tracerProvider,
    Map<String, EventPosition> initialPartitionEventPosition, int maxBatchSize, Duration maxWaitTime,
    boolean batchReceiveMode) {
    // Plain field assignments; no validation is performed here.
    this.checkpointStore = checkpointStore;
    this.partitionProcessorFactory = partitionProcessorFactory;
    this.eventHubClientBuilder = eventHubClientBuilder;
    this.trackLastEnqueuedEventProperties = trackLastEnqueuedEventProperties;
    this.tracerProvider = tracerProvider;
    this.initialPartitionEventPosition = initialPartitionEventPosition;
    this.maxBatchSize = maxBatchSize;
    // maxWaitTime may be null; callers branch on null to choose between timed and untimed receive.
    this.maxWaitTime = maxWaitTime;
    this.batchReceiveMode = batchReceiveMode;
}
/**
 * Stops all partition pumps that are actively consuming events. This method is invoked when the {@link
 * EventProcessorClient} is requested to stop.
 */
void stopAllPartitionPumps() {
    // Close every active consumer; each entry is removed even when close() throws.
    for (Map.Entry<String, EventHubConsumerAsyncClient> entry : partitionPumps.entrySet()) {
        final String partitionId = entry.getKey();
        try {
            entry.getValue().close();
        } catch (Exception ex) {
            logger.warning(Messages.FAILED_CLOSE_CONSUMER_PARTITION, partitionId, ex);
        } finally {
            // Removal during iteration is safe: partitionPumps is a ConcurrentHashMap.
            partitionPumps.remove(partitionId);
        }
    }
}
/**
* Starts a new partition pump for the newly claimed partition. If the partition already has an active partition
* pump, this will not create a new consumer.
*
* @param claimedOwnership The details of partition ownership for which new partition pump is requested to start.
*/
/**
 * Invokes the user's event callback for a single event, wrapping the call in a process tracing
 * span when diagnostic information is present on the event.
 */
private void processEvent(PartitionContext partitionContext, PartitionProcessor partitionProcessor,
    EventHubConsumerAsyncClient eventHubConsumer, PartitionEvent partitionEvent) {
    EventData eventData = partitionEvent.getData();
    // Span must be started before user code runs so the callback executes inside it.
    Context processSpanContext = startProcessTracingSpan(eventData, eventHubConsumer.getEventHubName(),
        eventHubConsumer.getFullyQualifiedNamespace());
    if (processSpanContext.getData(SPAN_CONTEXT_KEY).isPresent()) {
        eventData.addContext(SPAN_CONTEXT_KEY, processSpanContext);
    }
    try {
        partitionProcessor.processEvent(new EventContext(partitionContext, eventData, checkpointStore,
            partitionEvent.getLastEnqueuedEventProperties()));
        // End the span with a completion signal only after the callback returned normally.
        endProcessTracingSpan(processSpanContext, Signal.complete());
    } catch (Throwable throwable) {
        /* user code for event processing threw an exception - log and bubble up */
        // The span is closed with the error signal before rethrowing to the pump's subscriber.
        endProcessTracingSpan(processSpanContext, Signal.error(throwable));
        throw logger.logExceptionAsError(new PartitionProcessorException("Error in event processing callback",
            throwable));
    }
}
/**
 * Converts a received batch of partition events into {@link EventContext} instances and hands
 * them to the user's processor — as a batch, or as a single event when the processor was
 * configured for single-event delivery (maxBatchSize == 1 and not in batch mode).
 */
private void processEventBatch(PartitionContext partitionContext, PartitionProcessor partitionProcessor,
    EventHubConsumerAsyncClient eventHubConsumer, List<PartitionEvent> partitionEventBatch) {
    final List<EventContext> contexts = partitionEventBatch.stream()
        .map(event -> new EventContext(partitionContext, event.getData(), checkpointStore,
            event.getLastEnqueuedEventProperties()))
        .collect(Collectors.toList());
    try {
        if (batchReceiveMode || maxBatchSize != 1) {
            partitionProcessor.processEventBatch(contexts);
        } else {
            // Single-event mode: deliver exactly one context; an empty window yields a
            // null-event context so the processor still observes the elapsed interval.
            final EventContext single = contexts.isEmpty()
                ? new EventContext(partitionContext, null, checkpointStore, null)
                : contexts.get(0);
            partitionProcessor.processEvent(single);
        }
    } catch (Throwable throwable) {
        /* user code for event processing threw an exception - log and bubble up */
        throw logger.logExceptionAsError(new PartitionProcessorException("Error in event processing callback",
            throwable));
    }
}
/** Exposes the live map of active partition pumps, keyed by partition id. */
Map<String, EventHubConsumerAsyncClient> getPartitionPumps() {
    return partitionPumps;
}
/**
 * Handles an error raised by the receive pipeline: user-callback failures
 * ({@link PartitionProcessorException}) are rethrown after cleanup, while transport/receive
 * errors are reported to the processor's error callback instead.
 */
private void handleError(PartitionOwnership claimedOwnership, EventHubConsumerAsyncClient eventHubConsumer,
    PartitionProcessor partitionProcessor, Throwable throwable, PartitionContext partitionContext) {
    final boolean userCallbackError = throwable instanceof PartitionProcessorException;
    if (!userCallbackError) {
        logger.warning("Error receiving events from partition {}", partitionContext.getPartitionId(), throwable);
        partitionProcessor.processError(new ErrorContext(partitionContext, throwable));
    }
    // Either way the partition is treated as lost: notify the processor, then release resources.
    partitionProcessor.close(new CloseContext(partitionContext, CloseReason.LOST_PARTITION_OWNERSHIP));
    cleanup(claimedOwnership, eventHubConsumer);
    if (userCallbackError) {
        throw logger.logExceptionAsError((PartitionProcessorException) throwable);
    }
}
/** Closes the partition's consumer and always removes its pump entry, even when close() fails. */
private void cleanup(PartitionOwnership claimedOwnership, EventHubConsumerAsyncClient eventHubConsumer) {
    final String partitionId = claimedOwnership.getPartitionId();
    try {
        logger.info("Closing consumer for partition id {}", partitionId);
        eventHubConsumer.close();
    } finally {
        logger.info("Removing partition id {} from list of processing partitions",
            partitionId);
        partitionPumps.remove(partitionId);
    }
}
/*
* Starts a new process tracing span and attaches the returned context to the EventData object for users.
*/
private Context startProcessTracingSpan(EventData eventData, String eventHubName, String fullyQualifiedNamespace) {
    // Only trace when the producer propagated a diagnostic id and tracing is enabled.
    Object diagnosticId = eventData.getProperties().get(DIAGNOSTIC_ID_KEY);
    if (diagnosticId == null || !tracerProvider.isEnabled()) {
        return Context.NONE;
    }
    // Link this span to the producer's span context and tag it with entity/host/namespace.
    Context spanContext = tracerProvider.extractContext(diagnosticId.toString(), Context.NONE)
        .addData(ENTITY_PATH_KEY, eventHubName)
        .addData(HOST_NAME_KEY, fullyQualifiedNamespace)
        .addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
    // Enqueued time is attached only when available on the event.
    spanContext = eventData.getEnqueuedTime() == null
        ? spanContext
        : spanContext.addData(MESSAGE_ENQUEUED_TIME, eventData.getEnqueuedTime().getEpochSecond());
    return tracerProvider.startSpan(spanContext, ProcessKind.PROCESS);
}
/*
 * Ends the process tracing span and closes the scope associated with that span.
 */
private void endProcessTracingSpan(Context processSpanContext, Signal<Void> signal) {
    Optional<Object> spanScope = processSpanContext.getData(SCOPE_KEY);
    if (!spanScope.isPresent() || !tracerProvider.isEnabled()) {
        return;
    }
    // Reuse the value already fetched instead of querying the context a second time.
    Object scope = spanScope.get();
    if (scope instanceof Closeable) {
        Closeable close = (Closeable) scope;
        try {
            close.close();
            tracerProvider.endSpan(processSpanContext, signal);
        } catch (IOException ioException) {
            logger.error(Messages.EVENT_PROCESSOR_RUN_END, ioException);
        }
    } else {
        // Bug fix: log the class of the scope value itself. The previous code called
        // getClass() on the Optional wrapper, which always printed java.util.Optional.
        logger.warning(String.format(Locale.US,
            Messages.PROCESS_SPAN_SCOPE_TYPE_ERROR,
            scope != null ? scope.getClass() : "null"));
    }
}
} | class PartitionPumpManager {
private final ClientLogger logger = new ClientLogger(PartitionPumpManager.class);
private final CheckpointStore checkpointStore;
private final Map<String, EventHubConsumerAsyncClient> partitionPumps = new ConcurrentHashMap<>();
private final Supplier<PartitionProcessor> partitionProcessorFactory;
private final EventHubClientBuilder eventHubClientBuilder;
private final TracerProvider tracerProvider;
private final boolean trackLastEnqueuedEventProperties;
private final Map<String, EventPosition> initialPartitionEventPosition;
private final Duration maxWaitTime;
private final int maxBatchSize;
private final boolean batchReceiveMode;
/**
* Creates an instance of partition pump manager.
*
* @param checkpointStore The partition manager that is used to store and update checkpoints.
* @param partitionProcessorFactory The partition processor factory that is used to create new instances of {@link
* PartitionProcessor} when new partition pumps are started.
* @param eventHubClientBuilder The client builder used to create new clients (and new connections) for each
* partition processed by this {@link EventProcessorClient}.
* @param trackLastEnqueuedEventProperties If set to {@code true}, all events received by this EventProcessorClient
* will also include the last enqueued event properties for it's respective partitions.
* @param tracerProvider The tracer implementation.
* @param initialPartitionEventPosition Map of initial event positions for partition ids.
* @param maxBatchSize The maximum batch size to receive per users' process handler invocation.
* @param maxWaitTime The maximum time to wait to receive a batch or a single event.
* @param batchReceiveMode The boolean value indicating if this processor is configured to receive in batches or
* single events.
*/
PartitionPumpManager(CheckpointStore checkpointStore,
Supplier<PartitionProcessor> partitionProcessorFactory, EventHubClientBuilder eventHubClientBuilder,
boolean trackLastEnqueuedEventProperties, TracerProvider tracerProvider,
Map<String, EventPosition> initialPartitionEventPosition, int maxBatchSize, Duration maxWaitTime,
boolean batchReceiveMode) {
this.checkpointStore = checkpointStore;
this.partitionProcessorFactory = partitionProcessorFactory;
this.eventHubClientBuilder = eventHubClientBuilder;
this.trackLastEnqueuedEventProperties = trackLastEnqueuedEventProperties;
this.tracerProvider = tracerProvider;
this.initialPartitionEventPosition = initialPartitionEventPosition;
this.maxBatchSize = maxBatchSize;
this.maxWaitTime = maxWaitTime;
this.batchReceiveMode = batchReceiveMode;
}
/**
* Stops all partition pumps that are actively consuming events. This method is invoked when the {@link
* EventProcessorClient} is requested to stop.
*/
void stopAllPartitionPumps() {
this.partitionPumps.forEach((partitionId, eventHubConsumer) -> {
try {
eventHubConsumer.close();
} catch (Exception ex) {
logger.warning(Messages.FAILED_CLOSE_CONSUMER_PARTITION, partitionId, ex);
} finally {
partitionPumps.remove(partitionId);
}
});
}
/**
* Starts a new partition pump for the newly claimed partition. If the partition already has an active partition
* pump, this will not create a new consumer.
*
* @param claimedOwnership The details of partition ownership for which new partition pump is requested to start.
*/
/**
 * Invokes the user's event callback for a single event context, wrapping the call in a process
 * tracing span when diagnostic information is present on the event.
 */
private void processEvent(PartitionContext partitionContext, PartitionProcessor partitionProcessor,
    EventHubConsumerAsyncClient eventHubConsumer, EventContext eventContext) {
    EventData eventData = eventContext.getEventData();
    // Span must be started before user code runs so the callback executes inside it.
    Context processSpanContext = startProcessTracingSpan(eventData, eventHubConsumer.getEventHubName(),
        eventHubConsumer.getFullyQualifiedNamespace());
    if (processSpanContext.getData(SPAN_CONTEXT_KEY).isPresent()) {
        eventData.addContext(SPAN_CONTEXT_KEY, processSpanContext);
    }
    try {
        // A fresh EventContext is built so the tracing-decorated EventData is the one delivered.
        partitionProcessor.processEvent(new EventContext(partitionContext, eventData, checkpointStore,
            eventContext.getLastEnqueuedEventProperties()));
        endProcessTracingSpan(processSpanContext, Signal.complete());
    } catch (Throwable throwable) {
        /* user code for event processing threw an exception - log and bubble up */
        // Close the span with the error signal before rethrowing to the pump's subscriber.
        endProcessTracingSpan(processSpanContext, Signal.error(throwable));
        throw logger.logExceptionAsError(new PartitionProcessorException("Error in event processing callback",
            throwable));
    }
}
/**
 * Converts a received batch of partition events into {@link EventContext} instances and hands
 * them to the user's processor — as a batch, or through the single-event path (with tracing)
 * when configured for single-event delivery.
 */
private void processEventBatch(PartitionContext partitionContext, PartitionProcessor partitionProcessor,
    EventHubConsumerAsyncClient eventHubConsumer, List<PartitionEvent> partitionEventBatch) {
    List<EventContext> eventContextBatch = partitionEventBatch.stream()
        .map(partitionEvent -> new EventContext(partitionContext, partitionEvent.getData(), checkpointStore,
            partitionEvent.getLastEnqueuedEventProperties()))
        .collect(Collectors.toList());
    try {
        // Single-event mode: not batch mode and a batch size of exactly one.
        if (!batchReceiveMode && maxBatchSize == 1) {
            EventContext eventContext;
            if (eventContextBatch.isEmpty()) {
                // Empty window: deliver a null-event context directly (no tracing span to start).
                eventContext = new EventContext(partitionContext, null, checkpointStore, null);
                partitionProcessor.processEvent(eventContext);
            } else {
                // Non-empty: route through processEvent so the tracing span wraps the callback.
                processEvent(partitionContext, partitionProcessor, eventHubConsumer, eventContextBatch.get(0));
            }
        } else {
            partitionProcessor.processEventBatch(eventContextBatch);
        }
    } catch (Throwable throwable) {
        /* user code for event processing threw an exception - log and bubble up */
        throw logger.logExceptionAsError(new PartitionProcessorException("Error in event processing callback",
            throwable));
    }
}
Map<String, EventHubConsumerAsyncClient> getPartitionPumps() {
return this.partitionPumps;
}
private void handleError(PartitionOwnership claimedOwnership, EventHubConsumerAsyncClient eventHubConsumer,
PartitionProcessor partitionProcessor, Throwable throwable, PartitionContext partitionContext) {
boolean shouldRethrow = true;
if (!(throwable instanceof PartitionProcessorException)) {
shouldRethrow = false;
logger.warning("Error receiving events from partition {}", partitionContext.getPartitionId(), throwable);
partitionProcessor.processError(new ErrorContext(partitionContext, throwable));
}
CloseReason closeReason = CloseReason.LOST_PARTITION_OWNERSHIP;
partitionProcessor.close(new CloseContext(partitionContext, closeReason));
cleanup(claimedOwnership, eventHubConsumer);
if (shouldRethrow) {
PartitionProcessorException exception = (PartitionProcessorException) throwable;
throw logger.logExceptionAsError(exception);
}
}
private void cleanup(PartitionOwnership claimedOwnership, EventHubConsumerAsyncClient eventHubConsumer) {
try {
logger.info("Closing consumer for partition id {}", claimedOwnership.getPartitionId());
eventHubConsumer.close();
} finally {
logger.info("Removing partition id {} from list of processing partitions",
claimedOwnership.getPartitionId());
partitionPumps.remove(claimedOwnership.getPartitionId());
}
}
/*
* Starts a new process tracing span and attaches the returned context to the EventData object for users.
*/
private Context startProcessTracingSpan(EventData eventData, String eventHubName, String fullyQualifiedNamespace) {
Object diagnosticId = eventData.getProperties().get(DIAGNOSTIC_ID_KEY);
if (diagnosticId == null || !tracerProvider.isEnabled()) {
return Context.NONE;
}
Context spanContext = tracerProvider.extractContext(diagnosticId.toString(), Context.NONE)
.addData(ENTITY_PATH_KEY, eventHubName)
.addData(HOST_NAME_KEY, fullyQualifiedNamespace)
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
spanContext = eventData.getEnqueuedTime() == null
? spanContext
: spanContext.addData(MESSAGE_ENQUEUED_TIME, eventData.getEnqueuedTime().getEpochSecond());
return tracerProvider.startSpan(spanContext, ProcessKind.PROCESS);
}
/*
 * Ends the process tracing span and closes the scope associated with that span.
 */
private void endProcessTracingSpan(Context processSpanContext, Signal<Void> signal) {
    Optional<Object> spanScope = processSpanContext.getData(SCOPE_KEY);
    if (!spanScope.isPresent() || !tracerProvider.isEnabled()) {
        return;
    }
    // Reuse the value already fetched instead of querying the context a second time.
    Object scope = spanScope.get();
    if (scope instanceof Closeable) {
        Closeable close = (Closeable) scope;
        try {
            close.close();
            tracerProvider.endSpan(processSpanContext, signal);
        } catch (IOException ioException) {
            logger.error(Messages.EVENT_PROCESSOR_RUN_END, ioException);
        }
    } else {
        // Bug fix: log the class of the scope value itself. The previous code called
        // getClass() on the Optional wrapper, which always printed java.util.Optional.
        logger.warning(String.format(Locale.US,
            Messages.PROCESS_SPAN_SCOPE_TYPE_ERROR,
            scope != null ? scope.getClass() : "null"));
    }
}
} |
Added validation to check that duration is not zero and also switched to window timeout as that seems like the more suited operator here. | void startPartitionPump(PartitionOwnership claimedOwnership, Checkpoint checkpoint) {
if (partitionPumps.containsKey(claimedOwnership.getPartitionId())) {
logger.verbose("Consumer is already running for this partition {}", claimedOwnership.getPartitionId());
return;
}
try {
PartitionContext partitionContext = new PartitionContext(claimedOwnership.getFullyQualifiedNamespace(),
claimedOwnership.getEventHubName(), claimedOwnership.getConsumerGroup(),
claimedOwnership.getPartitionId());
PartitionProcessor partitionProcessor = this.partitionProcessorFactory.get();
InitializationContext initializationContext = new InitializationContext(partitionContext);
partitionProcessor.initialize(initializationContext);
EventPosition startFromEventPosition = null;
if (checkpoint != null && checkpoint.getOffset() != null) {
startFromEventPosition = EventPosition.fromOffset(checkpoint.getOffset());
} else if (checkpoint != null && checkpoint.getSequenceNumber() != null) {
startFromEventPosition = EventPosition.fromSequenceNumber(checkpoint.getSequenceNumber());
} else if (initialPartitionEventPosition.containsKey(claimedOwnership.getPartitionId())) {
startFromEventPosition = initialPartitionEventPosition.get(claimedOwnership.getPartitionId());
} else {
startFromEventPosition = EventPosition.latest();
}
logger.info("Starting event processing from {} for partition {}", startFromEventPosition,
claimedOwnership.getPartitionId());
ReceiveOptions receiveOptions = new ReceiveOptions().setOwnerLevel(0L)
.setTrackLastEnqueuedEventProperties(trackLastEnqueuedEventProperties);
EventHubConsumerAsyncClient eventHubConsumer = eventHubClientBuilder.buildAsyncClient()
.createConsumer(claimedOwnership.getConsumerGroup(), EventHubClientBuilder.DEFAULT_PREFETCH_COUNT);
partitionPumps.put(claimedOwnership.getPartitionId(), eventHubConsumer);
if (maxWaitTime != null) {
eventHubConsumer
.receiveFromPartition(claimedOwnership.getPartitionId(), startFromEventPosition, receiveOptions)
.bufferTimeout(maxBatchSize, maxWaitTime)
.subscribe(partitionEventBatch -> {
processEventBatch(partitionContext, partitionProcessor,
eventHubConsumer, partitionEventBatch);
},
/* EventHubConsumer receive() returned an error */
ex -> handleError(claimedOwnership, eventHubConsumer, partitionProcessor, ex, partitionContext),
() -> {
partitionProcessor.close(new CloseContext(partitionContext,
CloseReason.EVENT_PROCESSOR_SHUTDOWN));
cleanup(claimedOwnership, eventHubConsumer);
});
} else {
eventHubConsumer
.receiveFromPartition(claimedOwnership.getPartitionId(), startFromEventPosition, receiveOptions)
.buffer(maxBatchSize)
.subscribe(partitionEventBatch -> {
processEventBatch(partitionContext, partitionProcessor,
eventHubConsumer, partitionEventBatch);
},
/* EventHubConsumer receive() returned an error */
ex -> handleError(claimedOwnership, eventHubConsumer, partitionProcessor, ex, partitionContext),
() -> {
partitionProcessor.close(new CloseContext(partitionContext,
CloseReason.EVENT_PROCESSOR_SHUTDOWN));
cleanup(claimedOwnership, eventHubConsumer);
});
}
} catch (Exception ex) {
if (partitionPumps.containsKey(claimedOwnership.getPartitionId())) {
cleanup(claimedOwnership, partitionPumps.get(claimedOwnership.getPartitionId()));
}
throw logger.logExceptionAsError(
new PartitionProcessorException(
"Error occurred while starting partition pump for partition " + claimedOwnership.getPartitionId(),
ex));
}
} | if (maxWaitTime != null) { | void startPartitionPump(PartitionOwnership claimedOwnership, Checkpoint checkpoint) {
if (partitionPumps.containsKey(claimedOwnership.getPartitionId())) {
logger.verbose("Consumer is already running for this partition {}", claimedOwnership.getPartitionId());
return;
}
try {
PartitionContext partitionContext = new PartitionContext(claimedOwnership.getFullyQualifiedNamespace(),
claimedOwnership.getEventHubName(), claimedOwnership.getConsumerGroup(),
claimedOwnership.getPartitionId());
PartitionProcessor partitionProcessor = this.partitionProcessorFactory.get();
InitializationContext initializationContext = new InitializationContext(partitionContext);
partitionProcessor.initialize(initializationContext);
EventPosition startFromEventPosition = null;
if (checkpoint != null && checkpoint.getOffset() != null) {
startFromEventPosition = EventPosition.fromOffset(checkpoint.getOffset());
} else if (checkpoint != null && checkpoint.getSequenceNumber() != null) {
startFromEventPosition = EventPosition.fromSequenceNumber(checkpoint.getSequenceNumber());
} else if (initialPartitionEventPosition.containsKey(claimedOwnership.getPartitionId())) {
startFromEventPosition = initialPartitionEventPosition.get(claimedOwnership.getPartitionId());
} else {
startFromEventPosition = EventPosition.latest();
}
logger.info("Starting event processing from {} for partition {}", startFromEventPosition,
claimedOwnership.getPartitionId());
ReceiveOptions receiveOptions = new ReceiveOptions().setOwnerLevel(0L)
.setTrackLastEnqueuedEventProperties(trackLastEnqueuedEventProperties);
EventHubConsumerAsyncClient eventHubConsumer = eventHubClientBuilder.buildAsyncClient()
.createConsumer(claimedOwnership.getConsumerGroup(), EventHubClientBuilder.DEFAULT_PREFETCH_COUNT);
partitionPumps.put(claimedOwnership.getPartitionId(), eventHubConsumer);
if (maxWaitTime != null) {
eventHubConsumer
.receiveFromPartition(claimedOwnership.getPartitionId(), startFromEventPosition, receiveOptions)
.windowTimeout(maxBatchSize, maxWaitTime)
.flatMap(Flux::collectList)
.subscribe(partitionEventBatch -> processEventBatch(partitionContext, partitionProcessor,
eventHubConsumer, partitionEventBatch),
/* EventHubConsumer receive() returned an error */
ex -> handleError(claimedOwnership, eventHubConsumer, partitionProcessor, ex, partitionContext),
() -> {
partitionProcessor.close(new CloseContext(partitionContext,
CloseReason.EVENT_PROCESSOR_SHUTDOWN));
cleanup(claimedOwnership, eventHubConsumer);
});
} else {
eventHubConsumer
.receiveFromPartition(claimedOwnership.getPartitionId(), startFromEventPosition, receiveOptions)
.window(maxBatchSize)
.flatMap(Flux::collectList)
.subscribe(partitionEventBatch -> {
processEventBatch(partitionContext, partitionProcessor,
eventHubConsumer, partitionEventBatch);
},
/* EventHubConsumer receive() returned an error */
ex -> handleError(claimedOwnership, eventHubConsumer, partitionProcessor, ex, partitionContext),
() -> {
partitionProcessor.close(new CloseContext(partitionContext,
CloseReason.EVENT_PROCESSOR_SHUTDOWN));
cleanup(claimedOwnership, eventHubConsumer);
});
}
} catch (Exception ex) {
if (partitionPumps.containsKey(claimedOwnership.getPartitionId())) {
cleanup(claimedOwnership, partitionPumps.get(claimedOwnership.getPartitionId()));
}
throw logger.logExceptionAsError(
new PartitionProcessorException(
"Error occurred while starting partition pump for partition " + claimedOwnership.getPartitionId(),
ex));
}
} | class PartitionPumpManager {
private final ClientLogger logger = new ClientLogger(PartitionPumpManager.class);
private final CheckpointStore checkpointStore;
private final Map<String, EventHubConsumerAsyncClient> partitionPumps = new ConcurrentHashMap<>();
private final Supplier<PartitionProcessor> partitionProcessorFactory;
private final EventHubClientBuilder eventHubClientBuilder;
private final TracerProvider tracerProvider;
private final boolean trackLastEnqueuedEventProperties;
private final Map<String, EventPosition> initialPartitionEventPosition;
private final Duration maxWaitTime;
private final int maxBatchSize;
private final boolean batchReceiveMode;
/**
* Creates an instance of partition pump manager.
*
* @param checkpointStore The partition manager that is used to store and update checkpoints.
* @param partitionProcessorFactory The partition processor factory that is used to create new instances of {@link
* PartitionProcessor} when new partition pumps are started.
* @param eventHubClientBuilder The client builder used to create new clients (and new connections) for each
* partition processed by this {@link EventProcessorClient}.
* @param trackLastEnqueuedEventProperties If set to {@code true}, all events received by this EventProcessorClient
* will also include the last enqueued event properties for it's respective partitions.
* @param tracerProvider The tracer implementation.
* @param initialPartitionEventPosition Map of initial event positions for partition ids.
* @param maxBatchSize The maximum batch size to receive per users' process handler invocation.
* @param maxWaitTime The maximum time to wait to receive a batch or a single event.
* @param batchReceiveMode The boolean value indicating if this processor is configured to receive in batches or
* single events.
*/
PartitionPumpManager(CheckpointStore checkpointStore,
Supplier<PartitionProcessor> partitionProcessorFactory, EventHubClientBuilder eventHubClientBuilder,
boolean trackLastEnqueuedEventProperties, TracerProvider tracerProvider,
Map<String, EventPosition> initialPartitionEventPosition, int maxBatchSize, Duration maxWaitTime,
boolean batchReceiveMode) {
this.checkpointStore = checkpointStore;
this.partitionProcessorFactory = partitionProcessorFactory;
this.eventHubClientBuilder = eventHubClientBuilder;
this.trackLastEnqueuedEventProperties = trackLastEnqueuedEventProperties;
this.tracerProvider = tracerProvider;
this.initialPartitionEventPosition = initialPartitionEventPosition;
this.maxBatchSize = maxBatchSize;
this.maxWaitTime = maxWaitTime;
this.batchReceiveMode = batchReceiveMode;
}
/**
* Stops all partition pumps that are actively consuming events. This method is invoked when the {@link
* EventProcessorClient} is requested to stop.
*/
void stopAllPartitionPumps() {
this.partitionPumps.forEach((partitionId, eventHubConsumer) -> {
try {
eventHubConsumer.close();
} catch (Exception ex) {
logger.warning(Messages.FAILED_CLOSE_CONSUMER_PARTITION, partitionId, ex);
} finally {
partitionPumps.remove(partitionId);
}
});
}
/**
* Starts a new partition pump for the newly claimed partition. If the partition already has an active partition
* pump, this will not create a new consumer.
*
* @param claimedOwnership The details of partition ownership for which new partition pump is requested to start.
*/
private void processEvent(PartitionContext partitionContext, PartitionProcessor partitionProcessor,
EventHubConsumerAsyncClient eventHubConsumer, PartitionEvent partitionEvent) {
EventData eventData = partitionEvent.getData();
Context processSpanContext = startProcessTracingSpan(eventData, eventHubConsumer.getEventHubName(),
eventHubConsumer.getFullyQualifiedNamespace());
if (processSpanContext.getData(SPAN_CONTEXT_KEY).isPresent()) {
eventData.addContext(SPAN_CONTEXT_KEY, processSpanContext);
}
try {
partitionProcessor.processEvent(new EventContext(partitionContext, eventData, checkpointStore,
partitionEvent.getLastEnqueuedEventProperties()));
endProcessTracingSpan(processSpanContext, Signal.complete());
} catch (Throwable throwable) {
/* user code for event processing threw an exception - log and bubble up */
endProcessTracingSpan(processSpanContext, Signal.error(throwable));
throw logger.logExceptionAsError(new PartitionProcessorException("Error in event processing callback",
throwable));
}
}
private void processEventBatch(PartitionContext partitionContext, PartitionProcessor partitionProcessor,
EventHubConsumerAsyncClient eventHubConsumer, List<PartitionEvent> partitionEventBatch) {
List<EventContext> eventContextBatch = partitionEventBatch.stream()
.map(partitionEvent -> new EventContext(partitionContext, partitionEvent.getData(), checkpointStore,
partitionEvent.getLastEnqueuedEventProperties()))
.collect(Collectors.toList());
try {
if (!batchReceiveMode && maxBatchSize == 1) {
EventContext eventContext;
if (eventContextBatch.isEmpty()) {
eventContext = new EventContext(partitionContext, null, checkpointStore, null);
} else {
eventContext = eventContextBatch.get(0);
}
partitionProcessor.processEvent(eventContext);
} else {
partitionProcessor.processEventBatch(eventContextBatch);
}
} catch (Throwable throwable) {
/* user code for event processing threw an exception - log and bubble up */
throw logger.logExceptionAsError(new PartitionProcessorException("Error in event processing callback",
throwable));
}
}
Map<String, EventHubConsumerAsyncClient> getPartitionPumps() {
return this.partitionPumps;
}
private void handleError(PartitionOwnership claimedOwnership, EventHubConsumerAsyncClient eventHubConsumer,
PartitionProcessor partitionProcessor, Throwable throwable, PartitionContext partitionContext) {
boolean shouldRethrow = true;
if (!(throwable instanceof PartitionProcessorException)) {
shouldRethrow = false;
logger.warning("Error receiving events from partition {}", partitionContext.getPartitionId(), throwable);
partitionProcessor.processError(new ErrorContext(partitionContext, throwable));
}
CloseReason closeReason = CloseReason.LOST_PARTITION_OWNERSHIP;
partitionProcessor.close(new CloseContext(partitionContext, closeReason));
cleanup(claimedOwnership, eventHubConsumer);
if (shouldRethrow) {
PartitionProcessorException exception = (PartitionProcessorException) throwable;
throw logger.logExceptionAsError(exception);
}
}
private void cleanup(PartitionOwnership claimedOwnership, EventHubConsumerAsyncClient eventHubConsumer) {
try {
logger.info("Closing consumer for partition id {}", claimedOwnership.getPartitionId());
eventHubConsumer.close();
} finally {
logger.info("Removing partition id {} from list of processing partitions",
claimedOwnership.getPartitionId());
partitionPumps.remove(claimedOwnership.getPartitionId());
}
}
/*
* Starts a new process tracing span and attaches the returned context to the EventData object for users.
*/
private Context startProcessTracingSpan(EventData eventData, String eventHubName, String fullyQualifiedNamespace) {
Object diagnosticId = eventData.getProperties().get(DIAGNOSTIC_ID_KEY);
if (diagnosticId == null || !tracerProvider.isEnabled()) {
return Context.NONE;
}
Context spanContext = tracerProvider.extractContext(diagnosticId.toString(), Context.NONE)
.addData(ENTITY_PATH_KEY, eventHubName)
.addData(HOST_NAME_KEY, fullyQualifiedNamespace)
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
spanContext = eventData.getEnqueuedTime() == null
? spanContext
: spanContext.addData(MESSAGE_ENQUEUED_TIME, eventData.getEnqueuedTime().getEpochSecond());
return tracerProvider.startSpan(spanContext, ProcessKind.PROCESS);
}
/*
* Ends the process tracing span and the scope of that span.
*/
private void endProcessTracingSpan(Context processSpanContext, Signal<Void> signal) {
Optional<Object> spanScope = processSpanContext.getData(SCOPE_KEY);
if (!spanScope.isPresent() || !tracerProvider.isEnabled()) {
return;
}
if (spanScope.get() instanceof Closeable) {
Closeable close = (Closeable) processSpanContext.getData(SCOPE_KEY).get();
try {
close.close();
tracerProvider.endSpan(processSpanContext, signal);
} catch (IOException ioException) {
logger.error(Messages.EVENT_PROCESSOR_RUN_END, ioException);
}
} else {
logger.warning(String.format(Locale.US,
Messages.PROCESS_SPAN_SCOPE_TYPE_ERROR,
spanScope.get() != null ? spanScope.getClass() : "null"));
}
}
} | class PartitionPumpManager {
private final ClientLogger logger = new ClientLogger(PartitionPumpManager.class);
private final CheckpointStore checkpointStore;
private final Map<String, EventHubConsumerAsyncClient> partitionPumps = new ConcurrentHashMap<>();
private final Supplier<PartitionProcessor> partitionProcessorFactory;
private final EventHubClientBuilder eventHubClientBuilder;
private final TracerProvider tracerProvider;
private final boolean trackLastEnqueuedEventProperties;
private final Map<String, EventPosition> initialPartitionEventPosition;
private final Duration maxWaitTime;
private final int maxBatchSize;
private final boolean batchReceiveMode;
/**
* Creates an instance of partition pump manager.
*
* @param checkpointStore The partition manager that is used to store and update checkpoints.
* @param partitionProcessorFactory The partition processor factory that is used to create new instances of {@link
* PartitionProcessor} when new partition pumps are started.
* @param eventHubClientBuilder The client builder used to create new clients (and new connections) for each
* partition processed by this {@link EventProcessorClient}.
* @param trackLastEnqueuedEventProperties If set to {@code true}, all events received by this EventProcessorClient
* will also include the last enqueued event properties for it's respective partitions.
* @param tracerProvider The tracer implementation.
* @param initialPartitionEventPosition Map of initial event positions for partition ids.
* @param maxBatchSize The maximum batch size to receive per users' process handler invocation.
* @param maxWaitTime The maximum time to wait to receive a batch or a single event.
* @param batchReceiveMode The boolean value indicating if this processor is configured to receive in batches or
* single events.
*/
PartitionPumpManager(CheckpointStore checkpointStore,
Supplier<PartitionProcessor> partitionProcessorFactory, EventHubClientBuilder eventHubClientBuilder,
boolean trackLastEnqueuedEventProperties, TracerProvider tracerProvider,
Map<String, EventPosition> initialPartitionEventPosition, int maxBatchSize, Duration maxWaitTime,
boolean batchReceiveMode) {
this.checkpointStore = checkpointStore;
this.partitionProcessorFactory = partitionProcessorFactory;
this.eventHubClientBuilder = eventHubClientBuilder;
this.trackLastEnqueuedEventProperties = trackLastEnqueuedEventProperties;
this.tracerProvider = tracerProvider;
this.initialPartitionEventPosition = initialPartitionEventPosition;
this.maxBatchSize = maxBatchSize;
this.maxWaitTime = maxWaitTime;
this.batchReceiveMode = batchReceiveMode;
}
/**
* Stops all partition pumps that are actively consuming events. This method is invoked when the {@link
* EventProcessorClient} is requested to stop.
*/
void stopAllPartitionPumps() {
this.partitionPumps.forEach((partitionId, eventHubConsumer) -> {
try {
eventHubConsumer.close();
} catch (Exception ex) {
logger.warning(Messages.FAILED_CLOSE_CONSUMER_PARTITION, partitionId, ex);
} finally {
partitionPumps.remove(partitionId);
}
});
}
/**
* Starts a new partition pump for the newly claimed partition. If the partition already has an active partition
* pump, this will not create a new consumer.
*
* @param claimedOwnership The details of partition ownership for which new partition pump is requested to start.
*/
private void processEvent(PartitionContext partitionContext, PartitionProcessor partitionProcessor,
EventHubConsumerAsyncClient eventHubConsumer, EventContext eventContext) {
EventData eventData = eventContext.getEventData();
Context processSpanContext = startProcessTracingSpan(eventData, eventHubConsumer.getEventHubName(),
eventHubConsumer.getFullyQualifiedNamespace());
if (processSpanContext.getData(SPAN_CONTEXT_KEY).isPresent()) {
eventData.addContext(SPAN_CONTEXT_KEY, processSpanContext);
}
try {
partitionProcessor.processEvent(new EventContext(partitionContext, eventData, checkpointStore,
eventContext.getLastEnqueuedEventProperties()));
endProcessTracingSpan(processSpanContext, Signal.complete());
} catch (Throwable throwable) {
/* user code for event processing threw an exception - log and bubble up */
endProcessTracingSpan(processSpanContext, Signal.error(throwable));
throw logger.logExceptionAsError(new PartitionProcessorException("Error in event processing callback",
throwable));
}
}
private void processEventBatch(PartitionContext partitionContext, PartitionProcessor partitionProcessor,
EventHubConsumerAsyncClient eventHubConsumer, List<PartitionEvent> partitionEventBatch) {
List<EventContext> eventContextBatch = partitionEventBatch.stream()
.map(partitionEvent -> new EventContext(partitionContext, partitionEvent.getData(), checkpointStore,
partitionEvent.getLastEnqueuedEventProperties()))
.collect(Collectors.toList());
try {
if (!batchReceiveMode && maxBatchSize == 1) {
EventContext eventContext;
if (eventContextBatch.isEmpty()) {
eventContext = new EventContext(partitionContext, null, checkpointStore, null);
partitionProcessor.processEvent(eventContext);
} else {
processEvent(partitionContext, partitionProcessor, eventHubConsumer, eventContextBatch.get(0));
}
} else {
partitionProcessor.processEventBatch(eventContextBatch);
}
} catch (Throwable throwable) {
/* user code for event processing threw an exception - log and bubble up */
throw logger.logExceptionAsError(new PartitionProcessorException("Error in event processing callback",
throwable));
}
}
Map<String, EventHubConsumerAsyncClient> getPartitionPumps() {
return this.partitionPumps;
}
private void handleError(PartitionOwnership claimedOwnership, EventHubConsumerAsyncClient eventHubConsumer,
PartitionProcessor partitionProcessor, Throwable throwable, PartitionContext partitionContext) {
boolean shouldRethrow = true;
if (!(throwable instanceof PartitionProcessorException)) {
shouldRethrow = false;
logger.warning("Error receiving events from partition {}", partitionContext.getPartitionId(), throwable);
partitionProcessor.processError(new ErrorContext(partitionContext, throwable));
}
CloseReason closeReason = CloseReason.LOST_PARTITION_OWNERSHIP;
partitionProcessor.close(new CloseContext(partitionContext, closeReason));
cleanup(claimedOwnership, eventHubConsumer);
if (shouldRethrow) {
PartitionProcessorException exception = (PartitionProcessorException) throwable;
throw logger.logExceptionAsError(exception);
}
}
private void cleanup(PartitionOwnership claimedOwnership, EventHubConsumerAsyncClient eventHubConsumer) {
try {
logger.info("Closing consumer for partition id {}", claimedOwnership.getPartitionId());
eventHubConsumer.close();
} finally {
logger.info("Removing partition id {} from list of processing partitions",
claimedOwnership.getPartitionId());
partitionPumps.remove(claimedOwnership.getPartitionId());
}
}
/*
* Starts a new process tracing span and attaches the returned context to the EventData object for users.
*/
private Context startProcessTracingSpan(EventData eventData, String eventHubName, String fullyQualifiedNamespace) {
Object diagnosticId = eventData.getProperties().get(DIAGNOSTIC_ID_KEY);
if (diagnosticId == null || !tracerProvider.isEnabled()) {
return Context.NONE;
}
Context spanContext = tracerProvider.extractContext(diagnosticId.toString(), Context.NONE)
.addData(ENTITY_PATH_KEY, eventHubName)
.addData(HOST_NAME_KEY, fullyQualifiedNamespace)
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
spanContext = eventData.getEnqueuedTime() == null
? spanContext
: spanContext.addData(MESSAGE_ENQUEUED_TIME, eventData.getEnqueuedTime().getEpochSecond());
return tracerProvider.startSpan(spanContext, ProcessKind.PROCESS);
}
/*
* Ends the process tracing span and the scope of that span.
*/
private void endProcessTracingSpan(Context processSpanContext, Signal<Void> signal) {
Optional<Object> spanScope = processSpanContext.getData(SCOPE_KEY);
if (!spanScope.isPresent() || !tracerProvider.isEnabled()) {
return;
}
if (spanScope.get() instanceof Closeable) {
Closeable close = (Closeable) processSpanContext.getData(SCOPE_KEY).get();
try {
close.close();
tracerProvider.endSpan(processSpanContext, signal);
} catch (IOException ioException) {
logger.error(Messages.EVENT_PROCESSOR_RUN_END, ioException);
}
} else {
logger.warning(String.format(Locale.US,
Messages.PROCESS_SPAN_SCOPE_TYPE_ERROR,
spanScope.get() != null ? spanScope.getClass() : "null"));
}
}
} |
@milismsft we need to have explicit automated tests for partition split. We have some tests for partition split that you can use as a sample: DocumentProducerTest (unit test) ReadMyWritesConsistencyTest (integration test) | public Mono<Void> run(CancellationToken cancellationToken) {
this.lastContinuation = this.settings.getStartContinuation();
this.isFirstQueryForChangeFeeds = true;
this.options.requestContinuation(this.lastContinuation);
return Flux.just(this)
.flatMap( value -> {
if (cancellationToken.isCancellationRequested()) {
return Flux.empty();
}
if(this.isFirstQueryForChangeFeeds) {
this.isFirstQueryForChangeFeeds = false;
return Flux.just(value);
}
ZonedDateTime stopTimer = ZonedDateTime.now().plus(this.settings.getFeedPollDelay());
return Mono.just(value)
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).last();
})
.flatMap(value -> this.documentClient.createDocumentChangeFeedQuery(this.settings.getCollectionSelfLink(), this.options)
.limitRequest(1)
)
.flatMap(documentFeedResponse -> {
if (cancellationToken.isCancellationRequested()) return Flux.error(new TaskCancelledException());
this.lastContinuation = documentFeedResponse.continuationToken();
if (documentFeedResponse.results() != null && documentFeedResponse.results().size() > 0) {
return this.dispatchChanges(documentFeedResponse)
.doOnError(throwable -> {
logger.debug("Exception was thrown from thread {}", Thread.currentThread().getId(), throwable);
})
.doOnSuccess((Void) -> {
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) throw new TaskCancelledException();
});
}
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) {
return Flux.error(new TaskCancelledException());
}
return Flux.empty();
})
.doOnComplete(() -> {
if (this.options.maxItemCount().compareTo(this.settings.getMaxItemCount()) != 0) {
this.options.maxItemCount(this.settings.getMaxItemCount());
}
})
.onErrorResume(throwable -> {
if (throwable instanceof CosmosClientException) {
CosmosClientException clientException = (CosmosClientException) throwable;
logger.warn("CosmosClientException: partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), clientException);
StatusCodeErrorType docDbError = ExceptionClassifier.classifyClientException(clientException);
switch (docDbError) {
case PARTITION_NOT_FOUND: {
this.resultException = new PartitionNotFoundException("Partition not found.", this.lastContinuation);
break;
}
case PARTITION_SPLIT: {
this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation);
break;
}
case UNDEFINED: {
this.resultException = new RuntimeException(clientException);
break;
}
case MAX_ITEM_COUNT_TOO_LARGE: {
if (this.options.maxItemCount() == null) {
this.options.maxItemCount(DefaultMaxItemCount);
} else if (this.options.maxItemCount() <= 1) {
logger.error("Cannot reduce maxItemCount further as it's already at {}", this.options.maxItemCount(), clientException);
this.resultException = new RuntimeException(clientException);
}
this.options.maxItemCount(this.options.maxItemCount() / 2);
logger.warn("Reducing maxItemCount, new value: {}", this.options.maxItemCount());
return Flux.empty();
}
case TRANSIENT_ERROR: {
if (clientException.retryAfterInMilliseconds() > 0) {
ZonedDateTime stopTimer = ZonedDateTime.now().plus(clientException.retryAfterInMilliseconds(), MILLIS);
return Mono.just(clientException.retryAfterInMilliseconds())
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).flatMap( values -> Flux.empty());
}
break;
}
default: {
logger.error("Unrecognized Cosmos exception returned error code {}", docDbError, clientException);
this.resultException = new RuntimeException(clientException);
break;
}
}
} else if (throwable instanceof LeaseLostException) {
logger.info("LeaseLoseException with partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId());
this.resultException = (LeaseLostException) throwable;
} else if (throwable instanceof TaskCancelledException) {
logger.debug("Task cancelled exception: partition {} from {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), throwable);
this.resultException = (TaskCancelledException) throwable;
} else {
logger.warn("Unexpected exception from thread {}", Thread.currentThread().getId(), throwable);
this.resultException = new RuntimeException(throwable);
}
return Flux.error(throwable);
})
.repeat(() -> {
if (cancellationToken.isCancellationRequested()) {
this.resultException = new TaskCancelledException();
return false;
}
return true;
})
.onErrorResume(throwable -> {
if (this.resultException == null) {
this.resultException = new RuntimeException(throwable);
}
return Flux.empty();
}).then();
} | this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation); | public Mono<Void> run(CancellationToken cancellationToken) {
this.lastContinuation = this.settings.getStartContinuation();
this.isFirstQueryForChangeFeeds = true;
this.options.requestContinuation(this.lastContinuation);
return Flux.just(this)
.flatMap( value -> {
if (cancellationToken.isCancellationRequested()) {
return Flux.empty();
}
if(this.isFirstQueryForChangeFeeds) {
this.isFirstQueryForChangeFeeds = false;
return Flux.just(value);
}
ZonedDateTime stopTimer = ZonedDateTime.now().plus(this.settings.getFeedPollDelay());
return Mono.just(value)
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).last();
})
.flatMap(value -> this.documentClient.createDocumentChangeFeedQuery(this.settings.getCollectionSelfLink(), this.options)
.limitRequest(1)
)
.flatMap(documentFeedResponse -> {
if (cancellationToken.isCancellationRequested()) return Flux.error(new TaskCancelledException());
this.lastContinuation = documentFeedResponse.continuationToken();
if (documentFeedResponse.results() != null && documentFeedResponse.results().size() > 0) {
return this.dispatchChanges(documentFeedResponse)
.doOnError(throwable -> {
logger.debug("Exception was thrown from thread {}", Thread.currentThread().getId(), throwable);
})
.doOnSuccess((Void) -> {
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) throw new TaskCancelledException();
});
}
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) {
return Flux.error(new TaskCancelledException());
}
return Flux.empty();
})
.doOnComplete(() -> {
if (this.options.maxItemCount().compareTo(this.settings.getMaxItemCount()) != 0) {
this.options.maxItemCount(this.settings.getMaxItemCount());
}
})
.onErrorResume(throwable -> {
if (throwable instanceof CosmosClientException) {
CosmosClientException clientException = (CosmosClientException) throwable;
logger.warn("CosmosClientException: partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), clientException);
StatusCodeErrorType docDbError = ExceptionClassifier.classifyClientException(clientException);
switch (docDbError) {
case PARTITION_NOT_FOUND: {
this.resultException = new PartitionNotFoundException("Partition not found.", this.lastContinuation);
break;
}
case PARTITION_SPLIT: {
this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation);
break;
}
case UNDEFINED: {
this.resultException = new RuntimeException(clientException);
break;
}
case MAX_ITEM_COUNT_TOO_LARGE: {
if (this.options.maxItemCount() == null) {
this.options.maxItemCount(DefaultMaxItemCount);
} else if (this.options.maxItemCount() <= 1) {
logger.error("Cannot reduce maxItemCount further as it's already at {}", this.options.maxItemCount(), clientException);
this.resultException = new RuntimeException(clientException);
}
this.options.maxItemCount(this.options.maxItemCount() / 2);
logger.warn("Reducing maxItemCount, new value: {}", this.options.maxItemCount());
return Flux.empty();
}
case TRANSIENT_ERROR: {
if (clientException.retryAfterInMilliseconds() > 0) {
ZonedDateTime stopTimer = ZonedDateTime.now().plus(clientException.retryAfterInMilliseconds(), MILLIS);
return Mono.just(clientException.retryAfterInMilliseconds())
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).flatMap( values -> Flux.empty());
}
break;
}
default: {
logger.error("Unrecognized Cosmos exception returned error code {}", docDbError, clientException);
this.resultException = new RuntimeException(clientException);
break;
}
}
} else if (throwable instanceof LeaseLostException) {
logger.info("LeaseLoseException with partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId());
this.resultException = (LeaseLostException) throwable;
} else if (throwable instanceof TaskCancelledException) {
logger.debug("Task cancelled exception: partition {} from {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), throwable);
this.resultException = (TaskCancelledException) throwable;
} else {
logger.warn("Unexpected exception from thread {}", Thread.currentThread().getId(), throwable);
this.resultException = new RuntimeException(throwable);
}
return Flux.error(throwable);
})
.repeat(() -> {
if (cancellationToken.isCancellationRequested()) {
this.resultException = new TaskCancelledException();
return false;
}
return true;
})
.onErrorResume(throwable -> {
if (this.resultException == null) {
this.resultException = new RuntimeException(throwable);
}
return Flux.empty();
}).then();
} | class PartitionProcessorImpl implements PartitionProcessor {
private static final Logger logger = LoggerFactory.getLogger(PartitionProcessorImpl.class);
private static final int DefaultMaxItemCount = 100;
private final ProcessorSettings settings;
private final PartitionCheckpointer checkpointer;
private final ChangeFeedObserver observer;
private final ChangeFeedOptions options;
private final ChangeFeedContextClient documentClient;
private volatile RuntimeException resultException;
private volatile String lastContinuation;
private volatile boolean isFirstQueryForChangeFeeds;
public PartitionProcessorImpl(ChangeFeedObserver observer, ChangeFeedContextClient documentClient, ProcessorSettings settings, PartitionCheckpointer checkpointer) {
this.observer = observer;
this.documentClient = documentClient;
this.settings = settings;
this.checkpointer = checkpointer;
this.options = new ChangeFeedOptions();
this.options.maxItemCount(settings.getMaxItemCount());
partitionKeyRangeIdInternal(this.options, settings.getPartitionKeyRangeId());
this.options.startFromBeginning(settings.isStartFromBeginning());
this.options.requestContinuation(settings.getStartContinuation());
this.options.startDateTime(settings.getStartTime());
}
@Override
@Override
public RuntimeException getResultException() {
return this.resultException;
}
private Mono<Void> dispatchChanges(FeedResponse<CosmosItemProperties> response) {
ChangeFeedObserverContext context = new ChangeFeedObserverContextImpl(this.settings.getPartitionKeyRangeId(), response, this.checkpointer);
return this.observer.processChanges(context, response.results());
}
} | class PartitionProcessorImpl implements PartitionProcessor {
private static final Logger logger = LoggerFactory.getLogger(PartitionProcessorImpl.class);
private static final int DefaultMaxItemCount = 100;
private final ProcessorSettings settings;
private final PartitionCheckpointer checkpointer;
private final ChangeFeedObserver observer;
private final ChangeFeedOptions options;
private final ChangeFeedContextClient documentClient;
private volatile RuntimeException resultException;
private volatile String lastContinuation;
private volatile boolean isFirstQueryForChangeFeeds;
public PartitionProcessorImpl(ChangeFeedObserver observer, ChangeFeedContextClient documentClient, ProcessorSettings settings, PartitionCheckpointer checkpointer) {
this.observer = observer;
this.documentClient = documentClient;
this.settings = settings;
this.checkpointer = checkpointer;
this.options = new ChangeFeedOptions();
this.options.maxItemCount(settings.getMaxItemCount());
partitionKeyRangeIdInternal(this.options, settings.getPartitionKeyRangeId());
this.options.startFromBeginning(settings.isStartFromBeginning());
this.options.requestContinuation(settings.getStartContinuation());
this.options.startDateTime(settings.getStartTime());
}
@Override
@Override
public RuntimeException getResultException() {
return this.resultException;
}
private Mono<Void> dispatchChanges(FeedResponse<CosmosItemProperties> response) {
ChangeFeedObserverContext context = new ChangeFeedObserverContextImpl(this.settings.getPartitionKeyRangeId(), response, this.checkpointer);
return this.observer.processChanges(context, response.results());
}
} |
I've added a specific test for splits in V4 respective change. The challenge is that public emulator does not support splitting so the test must be run against the cloud service. | public Mono<Void> run(CancellationToken cancellationToken) {
this.lastContinuation = this.settings.getStartContinuation();
this.isFirstQueryForChangeFeeds = true;
this.options.requestContinuation(this.lastContinuation);
return Flux.just(this)
.flatMap( value -> {
if (cancellationToken.isCancellationRequested()) {
return Flux.empty();
}
if(this.isFirstQueryForChangeFeeds) {
this.isFirstQueryForChangeFeeds = false;
return Flux.just(value);
}
ZonedDateTime stopTimer = ZonedDateTime.now().plus(this.settings.getFeedPollDelay());
return Mono.just(value)
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).last();
})
.flatMap(value -> this.documentClient.createDocumentChangeFeedQuery(this.settings.getCollectionSelfLink(), this.options)
.limitRequest(1)
)
.flatMap(documentFeedResponse -> {
if (cancellationToken.isCancellationRequested()) return Flux.error(new TaskCancelledException());
this.lastContinuation = documentFeedResponse.continuationToken();
if (documentFeedResponse.results() != null && documentFeedResponse.results().size() > 0) {
return this.dispatchChanges(documentFeedResponse)
.doOnError(throwable -> {
logger.debug("Exception was thrown from thread {}", Thread.currentThread().getId(), throwable);
})
.doOnSuccess((Void) -> {
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) throw new TaskCancelledException();
});
}
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) {
return Flux.error(new TaskCancelledException());
}
return Flux.empty();
})
.doOnComplete(() -> {
if (this.options.maxItemCount().compareTo(this.settings.getMaxItemCount()) != 0) {
this.options.maxItemCount(this.settings.getMaxItemCount());
}
})
.onErrorResume(throwable -> {
if (throwable instanceof CosmosClientException) {
CosmosClientException clientException = (CosmosClientException) throwable;
logger.warn("CosmosClientException: partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), clientException);
StatusCodeErrorType docDbError = ExceptionClassifier.classifyClientException(clientException);
switch (docDbError) {
case PARTITION_NOT_FOUND: {
this.resultException = new PartitionNotFoundException("Partition not found.", this.lastContinuation);
break;
}
case PARTITION_SPLIT: {
this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation);
break;
}
case UNDEFINED: {
this.resultException = new RuntimeException(clientException);
break;
}
case MAX_ITEM_COUNT_TOO_LARGE: {
if (this.options.maxItemCount() == null) {
this.options.maxItemCount(DefaultMaxItemCount);
} else if (this.options.maxItemCount() <= 1) {
logger.error("Cannot reduce maxItemCount further as it's already at {}", this.options.maxItemCount(), clientException);
this.resultException = new RuntimeException(clientException);
}
this.options.maxItemCount(this.options.maxItemCount() / 2);
logger.warn("Reducing maxItemCount, new value: {}", this.options.maxItemCount());
return Flux.empty();
}
case TRANSIENT_ERROR: {
if (clientException.retryAfterInMilliseconds() > 0) {
ZonedDateTime stopTimer = ZonedDateTime.now().plus(clientException.retryAfterInMilliseconds(), MILLIS);
return Mono.just(clientException.retryAfterInMilliseconds())
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).flatMap( values -> Flux.empty());
}
break;
}
default: {
logger.error("Unrecognized Cosmos exception returned error code {}", docDbError, clientException);
this.resultException = new RuntimeException(clientException);
break;
}
}
} else if (throwable instanceof LeaseLostException) {
logger.info("LeaseLoseException with partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId());
this.resultException = (LeaseLostException) throwable;
} else if (throwable instanceof TaskCancelledException) {
logger.debug("Task cancelled exception: partition {} from {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), throwable);
this.resultException = (TaskCancelledException) throwable;
} else {
logger.warn("Unexpected exception from thread {}", Thread.currentThread().getId(), throwable);
this.resultException = new RuntimeException(throwable);
}
return Flux.error(throwable);
})
.repeat(() -> {
if (cancellationToken.isCancellationRequested()) {
this.resultException = new TaskCancelledException();
return false;
}
return true;
})
.onErrorResume(throwable -> {
if (this.resultException == null) {
this.resultException = new RuntimeException(throwable);
}
return Flux.empty();
}).then();
} | this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation); | public Mono<Void> run(CancellationToken cancellationToken) {
this.lastContinuation = this.settings.getStartContinuation();
this.isFirstQueryForChangeFeeds = true;
this.options.requestContinuation(this.lastContinuation);
return Flux.just(this)
.flatMap( value -> {
if (cancellationToken.isCancellationRequested()) {
return Flux.empty();
}
if(this.isFirstQueryForChangeFeeds) {
this.isFirstQueryForChangeFeeds = false;
return Flux.just(value);
}
ZonedDateTime stopTimer = ZonedDateTime.now().plus(this.settings.getFeedPollDelay());
return Mono.just(value)
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).last();
})
.flatMap(value -> this.documentClient.createDocumentChangeFeedQuery(this.settings.getCollectionSelfLink(), this.options)
.limitRequest(1)
)
.flatMap(documentFeedResponse -> {
if (cancellationToken.isCancellationRequested()) return Flux.error(new TaskCancelledException());
this.lastContinuation = documentFeedResponse.continuationToken();
if (documentFeedResponse.results() != null && documentFeedResponse.results().size() > 0) {
return this.dispatchChanges(documentFeedResponse)
.doOnError(throwable -> {
logger.debug("Exception was thrown from thread {}", Thread.currentThread().getId(), throwable);
})
.doOnSuccess((Void) -> {
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) throw new TaskCancelledException();
});
}
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) {
return Flux.error(new TaskCancelledException());
}
return Flux.empty();
})
.doOnComplete(() -> {
if (this.options.maxItemCount().compareTo(this.settings.getMaxItemCount()) != 0) {
this.options.maxItemCount(this.settings.getMaxItemCount());
}
})
.onErrorResume(throwable -> {
if (throwable instanceof CosmosClientException) {
CosmosClientException clientException = (CosmosClientException) throwable;
logger.warn("CosmosClientException: partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), clientException);
StatusCodeErrorType docDbError = ExceptionClassifier.classifyClientException(clientException);
switch (docDbError) {
case PARTITION_NOT_FOUND: {
this.resultException = new PartitionNotFoundException("Partition not found.", this.lastContinuation);
break;
}
case PARTITION_SPLIT: {
this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation);
break;
}
case UNDEFINED: {
this.resultException = new RuntimeException(clientException);
break;
}
case MAX_ITEM_COUNT_TOO_LARGE: {
if (this.options.maxItemCount() == null) {
this.options.maxItemCount(DefaultMaxItemCount);
} else if (this.options.maxItemCount() <= 1) {
logger.error("Cannot reduce maxItemCount further as it's already at {}", this.options.maxItemCount(), clientException);
this.resultException = new RuntimeException(clientException);
}
this.options.maxItemCount(this.options.maxItemCount() / 2);
logger.warn("Reducing maxItemCount, new value: {}", this.options.maxItemCount());
return Flux.empty();
}
case TRANSIENT_ERROR: {
if (clientException.retryAfterInMilliseconds() > 0) {
ZonedDateTime stopTimer = ZonedDateTime.now().plus(clientException.retryAfterInMilliseconds(), MILLIS);
return Mono.just(clientException.retryAfterInMilliseconds())
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).flatMap( values -> Flux.empty());
}
break;
}
default: {
logger.error("Unrecognized Cosmos exception returned error code {}", docDbError, clientException);
this.resultException = new RuntimeException(clientException);
break;
}
}
} else if (throwable instanceof LeaseLostException) {
logger.info("LeaseLoseException with partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId());
this.resultException = (LeaseLostException) throwable;
} else if (throwable instanceof TaskCancelledException) {
logger.debug("Task cancelled exception: partition {} from {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), throwable);
this.resultException = (TaskCancelledException) throwable;
} else {
logger.warn("Unexpected exception from thread {}", Thread.currentThread().getId(), throwable);
this.resultException = new RuntimeException(throwable);
}
return Flux.error(throwable);
})
.repeat(() -> {
if (cancellationToken.isCancellationRequested()) {
this.resultException = new TaskCancelledException();
return false;
}
return true;
})
.onErrorResume(throwable -> {
if (this.resultException == null) {
this.resultException = new RuntimeException(throwable);
}
return Flux.empty();
}).then();
} | class PartitionProcessorImpl implements PartitionProcessor {
private static final Logger logger = LoggerFactory.getLogger(PartitionProcessorImpl.class);
private static final int DefaultMaxItemCount = 100;
private final ProcessorSettings settings;
private final PartitionCheckpointer checkpointer;
private final ChangeFeedObserver observer;
private final ChangeFeedOptions options;
private final ChangeFeedContextClient documentClient;
private volatile RuntimeException resultException;
private volatile String lastContinuation;
private volatile boolean isFirstQueryForChangeFeeds;
public PartitionProcessorImpl(ChangeFeedObserver observer, ChangeFeedContextClient documentClient, ProcessorSettings settings, PartitionCheckpointer checkpointer) {
this.observer = observer;
this.documentClient = documentClient;
this.settings = settings;
this.checkpointer = checkpointer;
this.options = new ChangeFeedOptions();
this.options.maxItemCount(settings.getMaxItemCount());
partitionKeyRangeIdInternal(this.options, settings.getPartitionKeyRangeId());
this.options.startFromBeginning(settings.isStartFromBeginning());
this.options.requestContinuation(settings.getStartContinuation());
this.options.startDateTime(settings.getStartTime());
}
@Override
@Override
public RuntimeException getResultException() {
return this.resultException;
}
private Mono<Void> dispatchChanges(FeedResponse<CosmosItemProperties> response) {
ChangeFeedObserverContext context = new ChangeFeedObserverContextImpl(this.settings.getPartitionKeyRangeId(), response, this.checkpointer);
return this.observer.processChanges(context, response.results());
}
} | class PartitionProcessorImpl implements PartitionProcessor {
private static final Logger logger = LoggerFactory.getLogger(PartitionProcessorImpl.class);
private static final int DefaultMaxItemCount = 100;
private final ProcessorSettings settings;
private final PartitionCheckpointer checkpointer;
private final ChangeFeedObserver observer;
private final ChangeFeedOptions options;
private final ChangeFeedContextClient documentClient;
private volatile RuntimeException resultException;
private volatile String lastContinuation;
private volatile boolean isFirstQueryForChangeFeeds;
public PartitionProcessorImpl(ChangeFeedObserver observer, ChangeFeedContextClient documentClient, ProcessorSettings settings, PartitionCheckpointer checkpointer) {
this.observer = observer;
this.documentClient = documentClient;
this.settings = settings;
this.checkpointer = checkpointer;
this.options = new ChangeFeedOptions();
this.options.maxItemCount(settings.getMaxItemCount());
partitionKeyRangeIdInternal(this.options, settings.getPartitionKeyRangeId());
this.options.startFromBeginning(settings.isStartFromBeginning());
this.options.requestContinuation(settings.getStartContinuation());
this.options.startDateTime(settings.getStartTime());
}
@Override
@Override
public RuntimeException getResultException() {
return this.resultException;
}
private Mono<Void> dispatchChanges(FeedResponse<CosmosItemProperties> response) {
ChangeFeedObserverContext context = new ChangeFeedObserverContextImpl(this.settings.getPartitionKeyRangeId(), response, this.checkpointer);
return this.observer.processChanges(context, response.results());
}
} |
if we add tests on v4 that should be good enough. thanks. | public Mono<Void> run(CancellationToken cancellationToken) {
this.lastContinuation = this.settings.getStartContinuation();
this.isFirstQueryForChangeFeeds = true;
this.options.requestContinuation(this.lastContinuation);
return Flux.just(this)
.flatMap( value -> {
if (cancellationToken.isCancellationRequested()) {
return Flux.empty();
}
if(this.isFirstQueryForChangeFeeds) {
this.isFirstQueryForChangeFeeds = false;
return Flux.just(value);
}
ZonedDateTime stopTimer = ZonedDateTime.now().plus(this.settings.getFeedPollDelay());
return Mono.just(value)
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).last();
})
.flatMap(value -> this.documentClient.createDocumentChangeFeedQuery(this.settings.getCollectionSelfLink(), this.options)
.limitRequest(1)
)
.flatMap(documentFeedResponse -> {
if (cancellationToken.isCancellationRequested()) return Flux.error(new TaskCancelledException());
this.lastContinuation = documentFeedResponse.continuationToken();
if (documentFeedResponse.results() != null && documentFeedResponse.results().size() > 0) {
return this.dispatchChanges(documentFeedResponse)
.doOnError(throwable -> {
logger.debug("Exception was thrown from thread {}", Thread.currentThread().getId(), throwable);
})
.doOnSuccess((Void) -> {
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) throw new TaskCancelledException();
});
}
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) {
return Flux.error(new TaskCancelledException());
}
return Flux.empty();
})
.doOnComplete(() -> {
if (this.options.maxItemCount().compareTo(this.settings.getMaxItemCount()) != 0) {
this.options.maxItemCount(this.settings.getMaxItemCount());
}
})
.onErrorResume(throwable -> {
if (throwable instanceof CosmosClientException) {
CosmosClientException clientException = (CosmosClientException) throwable;
logger.warn("CosmosClientException: partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), clientException);
StatusCodeErrorType docDbError = ExceptionClassifier.classifyClientException(clientException);
switch (docDbError) {
case PARTITION_NOT_FOUND: {
this.resultException = new PartitionNotFoundException("Partition not found.", this.lastContinuation);
break;
}
case PARTITION_SPLIT: {
this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation);
break;
}
case UNDEFINED: {
this.resultException = new RuntimeException(clientException);
break;
}
case MAX_ITEM_COUNT_TOO_LARGE: {
if (this.options.maxItemCount() == null) {
this.options.maxItemCount(DefaultMaxItemCount);
} else if (this.options.maxItemCount() <= 1) {
logger.error("Cannot reduce maxItemCount further as it's already at {}", this.options.maxItemCount(), clientException);
this.resultException = new RuntimeException(clientException);
}
this.options.maxItemCount(this.options.maxItemCount() / 2);
logger.warn("Reducing maxItemCount, new value: {}", this.options.maxItemCount());
return Flux.empty();
}
case TRANSIENT_ERROR: {
if (clientException.retryAfterInMilliseconds() > 0) {
ZonedDateTime stopTimer = ZonedDateTime.now().plus(clientException.retryAfterInMilliseconds(), MILLIS);
return Mono.just(clientException.retryAfterInMilliseconds())
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).flatMap( values -> Flux.empty());
}
break;
}
default: {
logger.error("Unrecognized Cosmos exception returned error code {}", docDbError, clientException);
this.resultException = new RuntimeException(clientException);
break;
}
}
} else if (throwable instanceof LeaseLostException) {
logger.info("LeaseLoseException with partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId());
this.resultException = (LeaseLostException) throwable;
} else if (throwable instanceof TaskCancelledException) {
logger.debug("Task cancelled exception: partition {} from {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), throwable);
this.resultException = (TaskCancelledException) throwable;
} else {
logger.warn("Unexpected exception from thread {}", Thread.currentThread().getId(), throwable);
this.resultException = new RuntimeException(throwable);
}
return Flux.error(throwable);
})
.repeat(() -> {
if (cancellationToken.isCancellationRequested()) {
this.resultException = new TaskCancelledException();
return false;
}
return true;
})
.onErrorResume(throwable -> {
if (this.resultException == null) {
this.resultException = new RuntimeException(throwable);
}
return Flux.empty();
}).then();
} | this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation); | public Mono<Void> run(CancellationToken cancellationToken) {
this.lastContinuation = this.settings.getStartContinuation();
this.isFirstQueryForChangeFeeds = true;
this.options.requestContinuation(this.lastContinuation);
return Flux.just(this)
.flatMap( value -> {
if (cancellationToken.isCancellationRequested()) {
return Flux.empty();
}
if(this.isFirstQueryForChangeFeeds) {
this.isFirstQueryForChangeFeeds = false;
return Flux.just(value);
}
ZonedDateTime stopTimer = ZonedDateTime.now().plus(this.settings.getFeedPollDelay());
return Mono.just(value)
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).last();
})
.flatMap(value -> this.documentClient.createDocumentChangeFeedQuery(this.settings.getCollectionSelfLink(), this.options)
.limitRequest(1)
)
.flatMap(documentFeedResponse -> {
if (cancellationToken.isCancellationRequested()) return Flux.error(new TaskCancelledException());
this.lastContinuation = documentFeedResponse.continuationToken();
if (documentFeedResponse.results() != null && documentFeedResponse.results().size() > 0) {
return this.dispatchChanges(documentFeedResponse)
.doOnError(throwable -> {
logger.debug("Exception was thrown from thread {}", Thread.currentThread().getId(), throwable);
})
.doOnSuccess((Void) -> {
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) throw new TaskCancelledException();
});
}
this.options.requestContinuation(this.lastContinuation);
if (cancellationToken.isCancellationRequested()) {
return Flux.error(new TaskCancelledException());
}
return Flux.empty();
})
.doOnComplete(() -> {
if (this.options.maxItemCount().compareTo(this.settings.getMaxItemCount()) != 0) {
this.options.maxItemCount(this.settings.getMaxItemCount());
}
})
.onErrorResume(throwable -> {
if (throwable instanceof CosmosClientException) {
CosmosClientException clientException = (CosmosClientException) throwable;
logger.warn("CosmosClientException: partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), clientException);
StatusCodeErrorType docDbError = ExceptionClassifier.classifyClientException(clientException);
switch (docDbError) {
case PARTITION_NOT_FOUND: {
this.resultException = new PartitionNotFoundException("Partition not found.", this.lastContinuation);
break;
}
case PARTITION_SPLIT: {
this.resultException = new PartitionSplitException("Partition split.", this.lastContinuation);
break;
}
case UNDEFINED: {
this.resultException = new RuntimeException(clientException);
break;
}
case MAX_ITEM_COUNT_TOO_LARGE: {
if (this.options.maxItemCount() == null) {
this.options.maxItemCount(DefaultMaxItemCount);
} else if (this.options.maxItemCount() <= 1) {
logger.error("Cannot reduce maxItemCount further as it's already at {}", this.options.maxItemCount(), clientException);
this.resultException = new RuntimeException(clientException);
}
this.options.maxItemCount(this.options.maxItemCount() / 2);
logger.warn("Reducing maxItemCount, new value: {}", this.options.maxItemCount());
return Flux.empty();
}
case TRANSIENT_ERROR: {
if (clientException.retryAfterInMilliseconds() > 0) {
ZonedDateTime stopTimer = ZonedDateTime.now().plus(clientException.retryAfterInMilliseconds(), MILLIS);
return Mono.just(clientException.retryAfterInMilliseconds())
.delayElement(Duration.ofMillis(100))
.repeat( () -> {
ZonedDateTime currentTime = ZonedDateTime.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
}).flatMap( values -> Flux.empty());
}
break;
}
default: {
logger.error("Unrecognized Cosmos exception returned error code {}", docDbError, clientException);
this.resultException = new RuntimeException(clientException);
break;
}
}
} else if (throwable instanceof LeaseLostException) {
logger.info("LeaseLoseException with partition {} from thread {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId());
this.resultException = (LeaseLostException) throwable;
} else if (throwable instanceof TaskCancelledException) {
logger.debug("Task cancelled exception: partition {} from {}",
this.settings.getPartitionKeyRangeId(), Thread.currentThread().getId(), throwable);
this.resultException = (TaskCancelledException) throwable;
} else {
logger.warn("Unexpected exception from thread {}", Thread.currentThread().getId(), throwable);
this.resultException = new RuntimeException(throwable);
}
return Flux.error(throwable);
})
.repeat(() -> {
if (cancellationToken.isCancellationRequested()) {
this.resultException = new TaskCancelledException();
return false;
}
return true;
})
.onErrorResume(throwable -> {
if (this.resultException == null) {
this.resultException = new RuntimeException(throwable);
}
return Flux.empty();
}).then();
} | class PartitionProcessorImpl implements PartitionProcessor {
private static final Logger logger = LoggerFactory.getLogger(PartitionProcessorImpl.class);
private static final int DefaultMaxItemCount = 100;
private final ProcessorSettings settings;
private final PartitionCheckpointer checkpointer;
private final ChangeFeedObserver observer;
private final ChangeFeedOptions options;
private final ChangeFeedContextClient documentClient;
private volatile RuntimeException resultException;
private volatile String lastContinuation;
private volatile boolean isFirstQueryForChangeFeeds;
public PartitionProcessorImpl(ChangeFeedObserver observer, ChangeFeedContextClient documentClient, ProcessorSettings settings, PartitionCheckpointer checkpointer) {
this.observer = observer;
this.documentClient = documentClient;
this.settings = settings;
this.checkpointer = checkpointer;
this.options = new ChangeFeedOptions();
this.options.maxItemCount(settings.getMaxItemCount());
partitionKeyRangeIdInternal(this.options, settings.getPartitionKeyRangeId());
this.options.startFromBeginning(settings.isStartFromBeginning());
this.options.requestContinuation(settings.getStartContinuation());
this.options.startDateTime(settings.getStartTime());
}
@Override
@Override
public RuntimeException getResultException() {
return this.resultException;
}
private Mono<Void> dispatchChanges(FeedResponse<CosmosItemProperties> response) {
ChangeFeedObserverContext context = new ChangeFeedObserverContextImpl(this.settings.getPartitionKeyRangeId(), response, this.checkpointer);
return this.observer.processChanges(context, response.results());
}
} | class PartitionProcessorImpl implements PartitionProcessor {
private static final Logger logger = LoggerFactory.getLogger(PartitionProcessorImpl.class);
private static final int DefaultMaxItemCount = 100;
private final ProcessorSettings settings;
private final PartitionCheckpointer checkpointer;
private final ChangeFeedObserver observer;
private final ChangeFeedOptions options;
private final ChangeFeedContextClient documentClient;
private volatile RuntimeException resultException;
private volatile String lastContinuation;
private volatile boolean isFirstQueryForChangeFeeds;
public PartitionProcessorImpl(ChangeFeedObserver observer, ChangeFeedContextClient documentClient, ProcessorSettings settings, PartitionCheckpointer checkpointer) {
this.observer = observer;
this.documentClient = documentClient;
this.settings = settings;
this.checkpointer = checkpointer;
this.options = new ChangeFeedOptions();
this.options.maxItemCount(settings.getMaxItemCount());
partitionKeyRangeIdInternal(this.options, settings.getPartitionKeyRangeId());
this.options.startFromBeginning(settings.isStartFromBeginning());
this.options.requestContinuation(settings.getStartContinuation());
this.options.startDateTime(settings.getStartTime());
}
@Override
@Override
public RuntimeException getResultException() {
return this.resultException;
}
private Mono<Void> dispatchChanges(FeedResponse<CosmosItemProperties> response) {
ChangeFeedObserverContext context = new ChangeFeedObserverContextImpl(this.settings.getPartitionKeyRangeId(), response, this.checkpointer);
return this.observer.processChanges(context, response.results());
}
} |
To save duplicated logic, this method should call the maximal overload. | public IterableStream<ServiceBusReceivedMessage> receive(int maxMessages) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
}
final Flux<ServiceBusReceivedMessage> messages = Flux.create(emitter -> queueWork(maxMessages,
DEFAULT_RECEIVE_WAIT_TIME, emitter));
return new IterableStream<>(messages);
} | if (maxMessages <= 0) { | public IterableStream<ServiceBusReceivedMessage> receive(int maxMessages) {
return receive(maxMessages, operationTimeout);
} | class ServiceBusReceiverClient implements AutoCloseable {
private static final Duration DEFAULT_RECEIVE_WAIT_TIME = Duration.ofMinutes(1);
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverClient.class);
private final AtomicInteger idGenerator = new AtomicInteger();
private final ServiceBusReceiverAsyncClient asyncClient;
private final Duration operationTimeout;
/**
* Creates a synchronous receiver given its asynchronous counterpart.
*
* @param asyncClient Asynchronous receiver.
*/
ServiceBusReceiverClient(ServiceBusReceiverAsyncClient asyncClient, Duration operationTimeout) {
this.asyncClient = Objects.requireNonNull(asyncClient, "'asyncClient' cannot be null.");
this.operationTimeout = Objects.requireNonNull(operationTimeout, "'operationTimeout' cannot be null.");
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return asyncClient.getFullyQualifiedNamespace();
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return asyncClient.getEntityPath();
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void abandon(MessageLockToken lockToken) {
asyncClient.abandon(lockToken).block(operationTimeout);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
asyncClient.abandon(lockToken, propertiesToModify).block(operationTimeout);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void complete(MessageLockToken lockToken) {
asyncClient.complete(lockToken).block(operationTimeout);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public void defer(MessageLockToken lockToken) {
asyncClient.defer(lockToken).block(operationTimeout);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public void defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
asyncClient.defer(lockToken, propertiesToModify).block(operationTimeout);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public void deadLetter(MessageLockToken lockToken) {
asyncClient.deadLetter(lockToken).block(operationTimeout);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
asyncClient.deadLetter(lockToken, deadLetterOptions).block(operationTimeout);
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public ServiceBusReceivedMessage peek() {
return asyncClient.peek().block(operationTimeout);
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public ServiceBusReceivedMessage peekAt(long sequenceNumber) {
return asyncClient.peekAt(sequenceNumber).block(operationTimeout);
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public IterableStream<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
}
final Flux<ServiceBusReceivedMessage> messages = asyncClient.peekBatch(maxMessages)
.timeout(operationTimeout);
messages.subscribe();
return new IterableStream<>(messages);
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public IterableStream<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
}
final Flux<ServiceBusReceivedMessage> messages = asyncClient.peekBatchAt(maxMessages, sequenceNumber)
.timeout(operationTimeout);
messages.subscribe();
return new IterableStream<>(messages);
}
/**
* Receives an iterable stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
* The receive operation will wait for a default 1 minute for receiving a message before it times out. You can it
* override by using {@link
*
* @param maxMessages The maximum number of messages to receive.
* @return An {@link IterableStream} of at most {@code maxMessages} messages from the Service Bus entity.
*
* @throws IllegalArgumentException if {@code maxMessages} is zero or a negative value.
*/
/**
* Receives an iterable stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
*
* @param maxMessages The maximum number of messages to receive.
* @param maxWaitTime The time the client waits for receiving a message before it times out.
* @return An {@link IterableStream} of at most {@code maxMessages} messages from the Service Bus entity.
*
* @throws IllegalArgumentException if {@code maxMessages} or {@code maxWaitTime} is zero or a negative value.
*/
public IterableStream<ServiceBusReceivedMessage> receive(int maxMessages, Duration maxWaitTime) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
} else if (maxWaitTime.isNegative() || maxWaitTime.isZero()) {
throw logger.logExceptionAsError(
new IllegalArgumentException("'maxWaitTime' cannot be zero or less. maxWaitTime: " + maxWaitTime));
}
final Flux<ServiceBusReceivedMessage> messages = Flux.create(emitter -> queueWork(maxMessages, maxWaitTime,
emitter));
return new IterableStream<>(messages);
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public ServiceBusReceivedMessage receiveDeferredMessage(long sequenceNumber) {
return asyncClient.receiveDeferredMessage(sequenceNumber).block(operationTimeout);
}
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return An {@link IterableStream} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public IterableStream<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
final Flux<ServiceBusReceivedMessage> messages = asyncClient.receiveDeferredMessageBatch(sequenceNumbers)
.timeout(operationTimeout);
messages.subscribe();
return new IterableStream<>(messages);
}
/**
* Renews the lock on the specified message. The lock will be renewed based on the setting specified on the entity.
* When a message is received in {@link ReceiveMode
* receiver instance for a duration as specified during the Queue creation (LockDuration). If processing of the
* message requires longer than this duration, the lock needs to be renewed. For each renewal, the lock is reset to
* the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Instant renewMessageLock(MessageLockToken lockToken) {
return asyncClient.renewMessageLock(lockToken).block(operationTimeout);
}
/**
* {@inheritDoc}
*/
@Override
public void close() {
asyncClient.close();
}
/**
* Given an {@code emitter}, queues that work in {@link SynchronousMessageSubscriber}. If the synchronous job has
* not been created, will initialise it.
*/
private void queueWork(int maximumMessageCount, Duration maxWaitTime,
FluxSink<ServiceBusReceivedMessage> emitter) {
final long id = idGenerator.getAndIncrement();
final SynchronousReceiveWork work = new SynchronousReceiveWork(id, maximumMessageCount, maxWaitTime,
emitter);
final SynchronousMessageSubscriber syncSubscriber = new SynchronousMessageSubscriber(work);
logger.info("[{}]: Started synchronous message subscriber.", id);
asyncClient.receive().subscribeWith(syncSubscriber);
}
} | class ServiceBusReceiverClient implements AutoCloseable {
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverClient.class);
private final AtomicInteger idGenerator = new AtomicInteger();
private final ServiceBusReceiverAsyncClient asyncClient;
private final Duration operationTimeout;
/**
* Creates a synchronous receiver given its asynchronous counterpart.
*
* @param asyncClient Asynchronous receiver.
*/
ServiceBusReceiverClient(ServiceBusReceiverAsyncClient asyncClient, Duration operationTimeout) {
this.asyncClient = Objects.requireNonNull(asyncClient, "'asyncClient' cannot be null.");
this.operationTimeout = Objects.requireNonNull(operationTimeout, "'operationTimeout' cannot be null.");
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return asyncClient.getFullyQualifiedNamespace();
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return asyncClient.getEntityPath();
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void abandon(MessageLockToken lockToken) {
asyncClient.abandon(lockToken).block(operationTimeout);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
asyncClient.abandon(lockToken, propertiesToModify).block(operationTimeout);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void complete(MessageLockToken lockToken) {
asyncClient.complete(lockToken).block(operationTimeout);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public void defer(MessageLockToken lockToken) {
asyncClient.defer(lockToken).block(operationTimeout);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public void defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
asyncClient.defer(lockToken, propertiesToModify).block(operationTimeout);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public void deadLetter(MessageLockToken lockToken) {
asyncClient.deadLetter(lockToken).block(operationTimeout);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
asyncClient.deadLetter(lockToken, deadLetterOptions).block(operationTimeout);
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public ServiceBusReceivedMessage peek() {
return asyncClient.peek().block(operationTimeout);
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public ServiceBusReceivedMessage peekAt(long sequenceNumber) {
return asyncClient.peekAt(sequenceNumber).block(operationTimeout);
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public IterableStream<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
}
final Flux<ServiceBusReceivedMessage> messages = asyncClient.peekBatch(maxMessages)
.timeout(operationTimeout);
messages.subscribe();
return new IterableStream<>(messages);
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public IterableStream<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
}
final Flux<ServiceBusReceivedMessage> messages = asyncClient.peekBatchAt(maxMessages, sequenceNumber)
.timeout(operationTimeout);
messages.subscribe();
return new IterableStream<>(messages);
}
/**
* Receives an iterable stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
* The receive operation will wait for a default 1 minute for receiving a message before it times out. You can it
* override by using {@link
*
* @param maxMessages The maximum number of messages to receive.
* @return An {@link IterableStream} of at most {@code maxMessages} messages from the Service Bus entity.
*
* @throws IllegalArgumentException if {@code maxMessages} is zero or a negative value.
*/
/**
* Receives an iterable stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
*
* @param maxMessages The maximum number of messages to receive.
* @param maxWaitTime The time the client waits for receiving a message before it times out.
* @return An {@link IterableStream} of at most {@code maxMessages} messages from the Service Bus entity.
*
* @throws IllegalArgumentException if {@code maxMessages} or {@code maxWaitTime} is zero or a negative value.
*/
public IterableStream<ServiceBusReceivedMessage> receive(int maxMessages, Duration maxWaitTime) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
} else if (Objects.isNull(maxWaitTime)) {
throw logger.logExceptionAsError(
new NullPointerException("'maxWaitTime' cannot be null."));
} else if (maxWaitTime.isNegative() || maxWaitTime.isZero()) {
throw logger.logExceptionAsError(
new IllegalArgumentException("'maxWaitTime' cannot be zero or less. maxWaitTime: " + maxWaitTime));
}
final Flux<ServiceBusReceivedMessage> messages = Flux.create(emitter -> queueWork(maxMessages, maxWaitTime,
emitter));
return new IterableStream<>(messages);
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public ServiceBusReceivedMessage receiveDeferredMessage(long sequenceNumber) {
return asyncClient.receiveDeferredMessage(sequenceNumber).block(operationTimeout);
}
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return An {@link IterableStream} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public IterableStream<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
final Flux<ServiceBusReceivedMessage> messages = asyncClient.receiveDeferredMessageBatch(sequenceNumbers)
.timeout(operationTimeout);
messages.subscribe();
return new IterableStream<>(messages);
}
/**
* Renews the lock on the specified message. The lock will be renewed based on the setting specified on the entity.
* When a message is received in {@link ReceiveMode
* receiver instance for a duration as specified during the Queue creation (LockDuration). If processing of the
* message requires longer than this duration, the lock needs to be renewed. For each renewal, the lock is reset to
* the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Instant renewMessageLock(MessageLockToken lockToken) {
return asyncClient.renewMessageLock(lockToken).block(operationTimeout);
}
/**
* {@inheritDoc}
*/
@Override
public void close() {
asyncClient.close();
}
/**
* Given an {@code emitter}, queues that work in {@link SynchronousMessageSubscriber}. If the synchronous job has
* not been created, will initialise it.
*/
private void queueWork(int maximumMessageCount, Duration maxWaitTime,
FluxSink<ServiceBusReceivedMessage> emitter) {
final long id = idGenerator.getAndIncrement();
final SynchronousReceiveWork work = new SynchronousReceiveWork(id, maximumMessageCount, maxWaitTime,
emitter);
final SynchronousMessageSubscriber syncSubscriber = new SynchronousMessageSubscriber(work);
logger.info("[{}]: Started synchronous message subscriber.", id);
asyncClient.receive().subscribeWith(syncSubscriber);
}
} |
Possible NullPointerException if maxWaitTime is null | public IterableStream<ServiceBusReceivedMessage> receive(int maxMessages, Duration maxWaitTime) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
} else if (maxWaitTime.isNegative() || maxWaitTime.isZero()) {
throw logger.logExceptionAsError(
new IllegalArgumentException("'maxWaitTime' cannot be zero or less. maxWaitTime: " + maxWaitTime));
}
final Flux<ServiceBusReceivedMessage> messages = Flux.create(emitter -> queueWork(maxMessages, maxWaitTime,
emitter));
return new IterableStream<>(messages);
} | } else if (maxWaitTime.isNegative() || maxWaitTime.isZero()) { | public IterableStream<ServiceBusReceivedMessage> receive(int maxMessages, Duration maxWaitTime) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
} else if (Objects.isNull(maxWaitTime)) {
throw logger.logExceptionAsError(
new NullPointerException("'maxWaitTime' cannot be null."));
} else if (maxWaitTime.isNegative() || maxWaitTime.isZero()) {
throw logger.logExceptionAsError(
new IllegalArgumentException("'maxWaitTime' cannot be zero or less. maxWaitTime: " + maxWaitTime));
}
final Flux<ServiceBusReceivedMessage> messages = Flux.create(emitter -> queueWork(maxMessages, maxWaitTime,
emitter));
return new IterableStream<>(messages);
} | class ServiceBusReceiverClient implements AutoCloseable {
private static final Duration DEFAULT_RECEIVE_WAIT_TIME = Duration.ofMinutes(1);
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverClient.class);
private final AtomicInteger idGenerator = new AtomicInteger();
private final ServiceBusReceiverAsyncClient asyncClient;
private final Duration operationTimeout;
/**
* Creates a synchronous receiver given its asynchronous counterpart.
*
* @param asyncClient Asynchronous receiver.
*/
ServiceBusReceiverClient(ServiceBusReceiverAsyncClient asyncClient, Duration operationTimeout) {
this.asyncClient = Objects.requireNonNull(asyncClient, "'asyncClient' cannot be null.");
this.operationTimeout = Objects.requireNonNull(operationTimeout, "'operationTimeout' cannot be null.");
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return asyncClient.getFullyQualifiedNamespace();
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return asyncClient.getEntityPath();
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void abandon(MessageLockToken lockToken) {
asyncClient.abandon(lockToken).block(operationTimeout);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
asyncClient.abandon(lockToken, propertiesToModify).block(operationTimeout);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void complete(MessageLockToken lockToken) {
asyncClient.complete(lockToken).block(operationTimeout);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public void defer(MessageLockToken lockToken) {
asyncClient.defer(lockToken).block(operationTimeout);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public void defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
asyncClient.defer(lockToken, propertiesToModify).block(operationTimeout);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public void deadLetter(MessageLockToken lockToken) {
asyncClient.deadLetter(lockToken).block(operationTimeout);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
asyncClient.deadLetter(lockToken, deadLetterOptions).block(operationTimeout);
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public ServiceBusReceivedMessage peek() {
return asyncClient.peek().block(operationTimeout);
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public ServiceBusReceivedMessage peekAt(long sequenceNumber) {
return asyncClient.peekAt(sequenceNumber).block(operationTimeout);
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public IterableStream<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
}
final Flux<ServiceBusReceivedMessage> messages = asyncClient.peekBatch(maxMessages)
.timeout(operationTimeout);
messages.subscribe();
return new IterableStream<>(messages);
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public IterableStream<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
}
final Flux<ServiceBusReceivedMessage> messages = asyncClient.peekBatchAt(maxMessages, sequenceNumber)
.timeout(operationTimeout);
messages.subscribe();
return new IterableStream<>(messages);
}
/**
* Receives an iterable stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
* The receive operation will wait for a default 1 minute for receiving a message before it times out. You can it
* override by using {@link
*
* @param maxMessages The maximum number of messages to receive.
* @return An {@link IterableStream} of at most {@code maxMessages} messages from the Service Bus entity.
*
* @throws IllegalArgumentException if {@code maxMessages} is zero or a negative value.
*/
public IterableStream<ServiceBusReceivedMessage> receive(int maxMessages) {
if (maxMessages <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
}
final Flux<ServiceBusReceivedMessage> messages = Flux.create(emitter -> queueWork(maxMessages,
DEFAULT_RECEIVE_WAIT_TIME, emitter));
return new IterableStream<>(messages);
}
/**
* Receives an iterable stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
*
* @param maxMessages The maximum number of messages to receive.
* @param maxWaitTime The time the client waits for receiving a message before it times out.
* @return An {@link IterableStream} of at most {@code maxMessages} messages from the Service Bus entity.
*
* @throws IllegalArgumentException if {@code maxMessages} or {@code maxWaitTime} is zero or a negative value.
*/
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by
 * using their sequence number.
 *
 * @param sequenceNumber The {@link ServiceBusReceivedMessage#getSequenceNumber() sequence number} of the
 *     message.
 *
 * @return A deferred message with the matching {@code sequenceNumber}.
 */
public ServiceBusReceivedMessage receiveDeferredMessage(long sequenceNumber) {
    return asyncClient.receiveDeferredMessage(sequenceNumber).block(operationTimeout);
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be
 * received by using their sequence numbers.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return An {@link IterableStream} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public IterableStream<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    final Flux<ServiceBusReceivedMessage> messages = asyncClient.receiveDeferredMessageBatch(sequenceNumbers)
        .timeout(operationTimeout);
    // Bug fix: the previous eager messages.subscribe() here triggered the receive operation once, and
    // IterableStream subscribed to the cold Flux a second time when iterated — receiving the deferred
    // messages twice (duplicate delivery/settlement work on the service side).
    return new IterableStream<>(messages);
}
/**
 * Renews the lock on the specified message. The lock will be renewed based on the setting specified on the
 * entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message is locked for this
 * receiver instance for a duration as specified during the Queue creation (LockDuration). If processing of
 * the message requires longer than this duration, the lock needs to be renewed. For each renewal, the lock
 * is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is an empty value.
 */
public Instant renewMessageLock(MessageLockToken lockToken) {
    return asyncClient.renewMessageLock(lockToken).block(operationTimeout);
}
/**
 * {@inheritDoc}
 * <p>
 * Delegates to the underlying asynchronous client, which releases this receiver's resources.
 */
@Override
public void close() {
    asyncClient.close();
}
/**
 * Creates a {@link SynchronousReceiveWork} bound to the given {@code emitter} and starts a new
 * {@link SynchronousMessageSubscriber} that drains the asynchronous receive stream into it.
 */
private void queueWork(int maximumMessageCount, Duration maxWaitTime,
    FluxSink<ServiceBusReceivedMessage> emitter) {
    final long workId = idGenerator.getAndIncrement();
    final SynchronousReceiveWork receiveWork =
        new SynchronousReceiveWork(workId, maximumMessageCount, maxWaitTime, emitter);
    final SynchronousMessageSubscriber workSubscriber = new SynchronousMessageSubscriber(receiveWork);
    logger.info("[{}]: Started synchronous message subscriber.", workId);
    asyncClient.receive().subscribeWith(workSubscriber);
}
} | class ServiceBusReceiverClient implements AutoCloseable {
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverClient.class);
private final AtomicInteger idGenerator = new AtomicInteger();
private final ServiceBusReceiverAsyncClient asyncClient;
private final Duration operationTimeout;
/**
* Creates a synchronous receiver given its asynchronous counterpart.
*
* @param asyncClient Asynchronous receiver.
*/
ServiceBusReceiverClient(ServiceBusReceiverAsyncClient asyncClient, Duration operationTimeout) {
this.asyncClient = Objects.requireNonNull(asyncClient, "'asyncClient' cannot be null.");
this.operationTimeout = Objects.requireNonNull(operationTimeout, "'operationTimeout' cannot be null.");
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return asyncClient.getFullyQualifiedNamespace();
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return asyncClient.getEntityPath();
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void abandon(MessageLockToken lockToken) {
asyncClient.abandon(lockToken).block(operationTimeout);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
asyncClient.abandon(lockToken, propertiesToModify).block(operationTimeout);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void complete(MessageLockToken lockToken) {
asyncClient.complete(lockToken).block(operationTimeout);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public void defer(MessageLockToken lockToken) {
asyncClient.defer(lockToken).block(operationTimeout);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public void defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
asyncClient.defer(lockToken, propertiesToModify).block(operationTimeout);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public void deadLetter(MessageLockToken lockToken) {
asyncClient.deadLetter(lockToken).block(operationTimeout);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public void deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
asyncClient.deadLetter(lockToken, deadLetterOptions).block(operationTimeout);
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public ServiceBusReceivedMessage peek() {
return asyncClient.peek().block(operationTimeout);
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public ServiceBusReceivedMessage peekAt(long sequenceNumber) {
return asyncClient.peekAt(sequenceNumber).block(operationTimeout);
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 *
 * @return An {@link IterableStream} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public IterableStream<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (maxMessages <= 0) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
    }
    final Flux<ServiceBusReceivedMessage> messages = asyncClient.peekBatch(maxMessages)
        .timeout(operationTimeout);
    // Bug fix: the previous eager messages.subscribe() caused a second subscription when the
    // IterableStream was iterated, issuing the peek operation twice (and, since peeking advances this
    // receiver's peek position, potentially skipping messages the caller never saw).
    return new IterableStream<>(messages);
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the
 * state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 *
 * @return An {@link IterableStream} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public IterableStream<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (maxMessages <= 0) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "'maxMessages' cannot be less than or equal to 0. maxMessages: " + maxMessages));
    }
    final Flux<ServiceBusReceivedMessage> messages = asyncClient.peekBatchAt(maxMessages, sequenceNumber)
        .timeout(operationTimeout);
    // Bug fix: removed the eager messages.subscribe(); IterableStream subscribes when iterated, so the
    // extra subscription issued the same peek request to the service twice.
    return new IterableStream<>(messages);
}
/**
 * Receives an iterable stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
 * The receive operation waits a default maximum of one minute for a message before it times out; this
 * default can be overridden with the {@code receive(int, Duration)} overload.
 *
 * @param maxMessages The maximum number of messages to receive.
 * @return An {@link IterableStream} of at most {@code maxMessages} messages from the Service Bus entity.
 *
 * @throws IllegalArgumentException if {@code maxMessages} is zero or a negative value.
 */
public IterableStream<ServiceBusReceivedMessage> receive(int maxMessages) {
    // Argument validation and work queueing are delegated to the two-argument overload.
    return receive(maxMessages, operationTimeout);
}
/*
 * NOTE(review): orphaned Javadoc — it documents the receive(int maxMessages, Duration maxWaitTime)
 * overload, whose implementation is not present here. Restore the overload or remove this comment block.
 *
 * @param maxMessages The maximum number of messages to receive.
 * @param maxWaitTime The time the client waits for receiving a message before it times out.
 * @return An {@link IterableStream} of at most {@code maxMessages} messages from the Service Bus entity.
 *
 * @throws IllegalArgumentException if {@code maxMessages} or {@code maxWaitTime} is zero or a negative value.
 */
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public ServiceBusReceivedMessage receiveDeferredMessage(long sequenceNumber) {
return asyncClient.receiveDeferredMessage(sequenceNumber).block(operationTimeout);
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be
 * received by using their sequence numbers.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return An {@link IterableStream} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public IterableStream<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    final Flux<ServiceBusReceivedMessage> messages = asyncClient.receiveDeferredMessageBatch(sequenceNumbers)
        .timeout(operationTimeout);
    // Bug fix: removed the eager messages.subscribe(); IterableStream subscribes to the cold Flux when
    // iterated, so the extra subscription received the deferred messages twice.
    return new IterableStream<>(messages);
}
/**
* Renews the lock on the specified message. The lock will be renewed based on the setting specified on the entity.
* When a message is received in {@link ReceiveMode
* receiver instance for a duration as specified during the Queue creation (LockDuration). If processing of the
* message requires longer than this duration, the lock needs to be renewed. For each renewal, the lock is reset to
* the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Instant renewMessageLock(MessageLockToken lockToken) {
return asyncClient.renewMessageLock(lockToken).block(operationTimeout);
}
/**
* {@inheritDoc}
*/
@Override
public void close() {
asyncClient.close();
}
/**
 * Creates a {@link SynchronousReceiveWork} bound to the given {@code emitter} and starts a new
 * {@link SynchronousMessageSubscriber} that drains the asynchronous receive stream into it.
 */
private void queueWork(int maximumMessageCount, Duration maxWaitTime,
    FluxSink<ServiceBusReceivedMessage> emitter) {
    final long workId = idGenerator.getAndIncrement();
    final SynchronousReceiveWork receiveWork =
        new SynchronousReceiveWork(workId, maximumMessageCount, maxWaitTime, emitter);
    final SynchronousMessageSubscriber workSubscriber = new SynchronousMessageSubscriber(receiveWork);
    logger.info("[{}]: Started synchronous message subscriber.", workId);
    asyncClient.receive().subscribeWith(workSubscriber);
}
} |
This logic is done in MessageProcessor now. | public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber))
.map(receivedMessage -> {
if (receiveMode == ReceiveMode.PEEK_LOCK && !Objects.isNull(receivedMessage.getLockToken())) {
UUID lockToken = UUID.fromString(receivedMessage.getLockToken());
if (!ZERO_LOCK_TOKEN.equals(lockToken)) {
messageLockContainer.addOrUpdate(lockToken, receivedMessage.getLockedUntil());
}
}
return receivedMessage;
});
} | .map(receivedMessage -> { | public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber))
.map(receivedMessage -> {
if (receiveMode == ReceiveMode.PEEK_LOCK && !CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
receivedMessage.setLockedUntil(messageLockContainer.addOrUpdate(receivedMessage.getLockToken(),
receivedMessage.getLockedUntil()));
}
return receivedMessage;
});
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
private static final UUID ZERO_LOCK_TOKEN = new UUID(0L, 0L);
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiveMessageOptions receiveOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
private final Runnable onClientClose;
/**
* Map containing linkNames and their associated consumers.
* Key: linkName
* Value: consumer associated with that linkName.
*/
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiveOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client closes.
 * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code entityPath},
 *     {@code receiveOptions}, {@code connectionProcessor}, {@code tracerProvider} or
 *     {@code messageSerializer} is null.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    boolean isSessionEnabled, ReceiveMessageOptions receiveOptions,
    ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
    MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    this.receiveOptions = Objects.requireNonNull(receiveOptions, "'receiveMessageOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    // Cached from receiveOptions; both values are read on every receive/disposition call.
    this.prefetch = receiveOptions.getPrefetchCount();
    this.receiveMode = receiveOptions.getReceiveMode();
    // NOTE(review): entityType, messageLockContainer and onClientClose are stored without null checks —
    // confirm the builder guarantees non-null values for these.
    this.entityType = entityType;
    this.isSessionEnabled = isSessionEnabled;
    this.messageLockContainer = messageLockContainer;
    this.onClientClose = onClientClose;
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return entityPath;
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message
 * available again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is an empty value.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    // Delegates to the overload with no property modifications.
    return abandon(lockToken, null);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> complete(MessageLockToken lockToken) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken) {
return defer(lockToken, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(MessageLockToken lockToken) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with a deadletter reason,
 * error description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is an empty value.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (deadLetterOptions == null) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    final String reason = deadLetterOptions.getDeadLetterReason();
    final String description = deadLetterOptions.getDeadLetterErrorDescription();
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, reason, description,
        deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The
 * first call to {@code peek()} fetches the first active message for this receiver. Each subsequent call
 * fetches the subsequent message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(managementNode -> managementNode.peek());
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.peek(sequenceNumber));
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
 *
 * @return A stream of messages from the Service Bus entity.
 */
public Flux<ServiceBusReceivedMessage> receive() {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
    }
    // Auto-complete requires PEEK_LOCK: there is nothing to settle in RECEIVE_AND_DELETE mode.
    if (receiveMode != ReceiveMode.PEEK_LOCK && receiveOptions.isAutoComplete()) {
        return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
            "Autocomplete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
    }
    // The consumer is created lazily per subscription and removed/closed when the stream terminates.
    return Flux.usingWhen(
        Mono.fromCallable(() -> getOrCreateConsumer(entityPath)),
        ServiceBusAsyncConsumer::receive,
        consumer -> {
            final String linkName = consumer.getLinkName();
            logger.info("{}: Receiving completed. Disposing", linkName);
            final ServiceBusAsyncConsumer closing = openConsumers.remove(linkName);
            if (closing == null) {
                logger.warning("Could not find consumer to remove for: {}", linkName);
            } else {
                closing.close();
            }
            return Mono.empty();
        });
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting
 * specified on the entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message
 * is locked on the server for this receiver instance for a duration as specified during the Queue creation
 * (LockDuration). If processing of the message requires longer than this duration, the lock needs to be
 * renewed. For each renewal, the lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is an empty value or is not
 *     a valid UUID.
 */
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(lockToken.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (lockToken.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }
    final UUID lockTokenUuid;
    try {
        lockTokenUuid = UUID.fromString(lockToken.getLockToken());
    } catch (IllegalArgumentException ex) {
        // Bug fix: the error Mono returned by monoError was previously discarded (no 'return'), so a
        // malformed lock token fell through and the management node was invoked with a null UUID.
        return monoError(logger, ex);
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(serviceBusManagementNode ->
            serviceBusManagementNode.renewMessageLock(lockTokenUuid))
        .map(instant -> {
            // Keep the message's own view of its lock expiration in sync with the service response.
            if (lockToken instanceof ServiceBusReceivedMessage) {
                ((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
            }
            return instant;
        });
}
/**
 * Disposes of the consumer by closing the underlying connection to the service. Safe to call more than
 * once; only the first call performs the teardown.
 */
@Override
public void close() {
    if (isDisposed.getAndSet(true)) {
        return;
    }
    logger.info("Removing receiver links.");
    for (ServiceBusAsyncConsumer consumer : openConsumers.values()) {
        if (consumer != null) {
            consumer.close();
        }
    }
    openConsumers.clear();
    onClientClose.run();
}
// Returns true when the token is owned by this receiver and still locked; errors when the lock
// has already expired; false when the token is unknown to this receiver.
private Mono<Boolean> isLockTokenValid(UUID lockToken) {
    final Instant expiration = messageLockContainer.getLockTokenExpiration(lockToken);
    if (expiration == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }

    final Instant now = Instant.now();
    if (!expiration.isBefore(now)) {
        return Mono.just(true);
    }

    return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", expiration, now),
        getErrorContext())));
}
/**
 * Settles a message (complete/abandon/defer/dead-letter) via the management node.
 *
 * @param message Carrier of the lock token to settle.
 * @param dispositionStatus Desired terminal state for the message.
 * @param deadLetterReason Optional dead-letter reason; only meaningful for SUSPENDED.
 * @param deadLetterErrorDescription Optional dead-letter description; only meaningful for SUSPENDED.
 * @param propertiesToModify Optional properties to modify on the message.
 * @return A Mono completing when the service acknowledges the disposition.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {

    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    // Settling requires PEEK_LOCK; in RECEIVE_AND_DELETE the message is gone on receipt.
    // NOTE: the original repeated the null/empty lock-token checks here; they were unreachable
    // (already handled above) and have been removed.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }

    // Parse inside a guarded block so a malformed token surfaces as an error Mono instead of a
    // synchronous throw, consistent with renewMessageLock.
    final UUID lockToken;
    try {
        lockToken = UUID.fromString(message.getLockToken());
    } catch (IllegalArgumentException ex) {
        return monoError(logger, ex);
    }

    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);

    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        // Lock is settled; stop tracking it locally.
        messageLockContainer.remove(lockToken);
    }));
}
// Returns the consumer for this link, lazily creating it (and its underlying AMQP receive link)
// on first use. computeIfAbsent guarantees a single consumer per link name.
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName) {
    return openConsumers.computeIfAbsent(linkName, name -> {
        logger.info("{}: Creating consumer for link '{}'", entityPath, name);

        final Flux<AmqpReceiveLink> receiveLinks = connectionProcessor
            .flatMap(connection -> connection.createReceiveLink(name, entityPath, receiveMode,
                isSessionEnabled, null, entityType))
            .doOnNext(link -> logger.verbose(
                "Created consumer for Service Bus resource: [{}] mode: [{}]"
                    + " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]",
                link.getEntityPath(), receiveMode, isSessionEnabled, "N/A", entityType))
            .repeat();

        final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
        final ServiceBusReceiveLinkProcessor linkProcessor = receiveLinks.subscribeWith(
            new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor));

        return new ServiceBusAsyncConsumer(name, linkProcessor, messageSerializer,
            receiveOptions.isAutoComplete(), receiveOptions.isLockAutoRenewed(),
            receiveOptions.getMaxAutoRenewDuration(), connectionProcessor.getRetryOptions(),
            messageLockContainer, this::complete, this::abandon, this::renewMessageLock);
    });
}
// Builds the AMQP error context (namespace + entity) attached to exceptions from this receiver.
private AmqpErrorContext getErrorContext() {
    final String namespace = getFullyQualifiedNamespace();
    final String entity = getEntityPath();
    return new SessionErrorContext(namespace, entity);
}
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Shared default used by deadLetter(lockToken) when no options are supplied.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();

// Flipped to true exactly once by close(); guards every public operation.
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiverOptions receiverOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
// Number of messages to prefetch on the AMQP link, taken from receiverOptions.
private final int prefetch;
// PEEK_LOCK or RECEIVE_AND_DELETE; settlement operations require PEEK_LOCK.
private final ReceiveMode receiveMode;
// Tracks lock-token expirations for messages received by this client.
private final MessageLockContainer messageLockContainer;
// Used by receive() with no arguments: auto-complete on, renew duration = retry try-timeout.
private final ReceiveAsyncOptions defaultReceiveOptions;
// Callback supplied by the builder, run once when this client closes.
private final Runnable onClientClose;
/**
 * Map containing linkNames and their associated consumers. Key: linkName Value: consumer associated with that
 * linkName.
 */
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiverOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client completes.
 * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code entityPath}, {@code receiverOptions},
 *     {@code connectionProcessor}, {@code tracerProvider} or {@code messageSerializer} is null.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    boolean isSessionEnabled, ReceiverOptions receiverOptions,
    ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
    MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    // FIX: message previously said "'receiveMessageOptions'", a parameter that does not exist.
    this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiverOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    this.prefetch = receiverOptions.getPrefetchCount();
    this.receiveMode = receiverOptions.getReceiveMode();
    this.entityType = entityType;
    this.isSessionEnabled = isSessionEnabled;
    this.messageLockContainer = messageLockContainer;
    this.onClientClose = onClientClose;

    // receive() with no options: auto-complete enabled, lock renewal bounded by the retry try-timeout.
    this.defaultReceiveOptions = new ReceiveAsyncOptions()
        .setEnableAutoComplete(true)
        .setMaxAutoRenewDuration(connectionProcessor.getRetryOptions().getTryTimeout());
}
/**
 * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
 * {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace that the connection is associated with.
 */
public String getFullyQualifiedNamespace() {
    return fullyQualifiedNamespace;
}
/**
 * Gets the Service Bus resource this client interacts with, i.e. the queue or topic name.
 *
 * @return The Service Bus resource this client interacts with.
 */
public String getEntityPath() {
    return entityPath;
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if the token returned by {@link MessageLockToken#getLockToken()} is empty.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    return abandon(lockToken, null);
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
 * This will make the message available again for processing. Abandoning a message will increase the delivery count
 * on the message.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if the token returned by {@link MessageLockToken#getLockToken()} is empty.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if the token returned by {@link MessageLockToken#getLockToken()} is empty.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
    return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
 * subqueue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if the token returned by {@link MessageLockToken#getLockToken()} is empty.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    return defer(lockToken, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
 * move message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if the token returned by {@link MessageLockToken#getLockToken()} is empty.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if the token returned by {@link MessageLockToken#getLockToken()} is empty.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/service-bus-dead-letter-queues">Dead-letter
 * queues</a>
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if the token returned by {@link MessageLockToken#getLockToken()} is empty.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (Objects.isNull(deadLetterOptions)) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    // SUSPENDED is the disposition that routes the message to the dead-letter sub-queue.
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
        deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call
 * to {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the
 * subsequent message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(managementNode -> managementNode.peek());
}
/**
 * Starting from the given sequence number, reads the next active message without changing the state of the
 * receiver or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
    }

    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));

    return managementNode.flatMap(node -> node.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }

    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));

    return managementNode.flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }

    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));

    return managementNode.flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity and completes them
 * when they are finished processing.
 *
 * <p>
 * By default, each successfully consumed message is {@link #complete(MessageLockToken) completed}, and the message
 * lock is automatically renewed until the {@link AmqpRetryOptions#getTryTimeout() operation timeout} has elapsed
 * (see {@code defaultReceiveOptions} in the constructor).
 * </p>
 *
 * @return A stream of messages from the Service Bus entity.
 * @throws AmqpException if the {@link AmqpRetryOptions#getTryTimeout() operation timeout} elapses while
 *     downstream consumers are still processing the message.
 */
public Flux<ServiceBusReceivedMessage> receive() {
    return receive(defaultReceiveOptions);
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity with a set of
 * options. To disable lock auto-renewal, set {@link ReceiveAsyncOptions#setMaxAutoRenewDuration(Duration)
 * setMaxAutoRenewDuration} to {@link Duration#ZERO}.
 *
 * @param options Set of options to set when receiving messages.
 * @return A stream of messages from the Service Bus entity.
 * @throws NullPointerException if {@code options} is null.
 * @throws IllegalArgumentException if {@link ReceiveAsyncOptions#getMaxAutoRenewDuration() max auto-renew
 *     duration} is negative.
 */
public Flux<ServiceBusReceivedMessage> receive(ReceiveAsyncOptions options) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
    }
    if (Objects.isNull(options)) {
        return fluxError(logger, new NullPointerException("'options' cannot be null"));
    } else if (options.getMaxAutoRenewDuration() != null && options.getMaxAutoRenewDuration().isNegative()) {
        return fluxError(logger, new IllegalArgumentException("'maxAutoRenewDuration' cannot be negative."));
    }
    // Auto-complete is a settlement operation, so it is only valid in PEEK_LOCK mode.
    if (receiveMode != ReceiveMode.PEEK_LOCK && options.isEnableAutoComplete()) {
        return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
            "Auto-complete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
    }
    // usingWhen ties the consumer's lifetime to the returned Flux: the consumer is created on
    // subscription and removed from openConsumers + closed when the stream terminates or is cancelled.
    return Flux.usingWhen(
        Mono.fromCallable(() -> getOrCreateConsumer(entityPath, options)),
        consumer -> consumer.receive(),
        consumer -> {
            final String linkName = consumer.getLinkName();
            logger.info("{}: Receiving completed. Disposing", linkName);
            final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
            if (removed == null) {
                // Already removed elsewhere (e.g. close()); nothing left to dispose.
                logger.warning("Could not find consumer to remove for: {}", linkName);
            } else {
                removed.close();
            }
            return Mono.empty();
        });
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using sequence number.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
    }

    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));

    return managementNode.flatMapMany(node ->
        node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
 * on the entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message is locked on the
 * server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
 * processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
 * lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if the token returned by {@link MessageLockToken#getLockToken()} is empty or
 *     not a valid UUID.
 */
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(lockToken.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (lockToken.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    final UUID lockTokenUuid;
    try {
        lockTokenUuid = UUID.fromString(lockToken.getLockToken());
    } catch (IllegalArgumentException ex) {
        // BUG FIX: the error Mono produced by monoError was previously created and discarded, so a
        // malformed lock token fell through and the request was issued with a null UUID. Return it.
        return monoError(logger, ex);
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(serviceBusManagementNode ->
            serviceBusManagementNode.renewMessageLock(lockTokenUuid))
        .map(instant -> {
            // Propagate the new expiration onto the message object when the caller handed us one.
            if (lockToken instanceof ServiceBusReceivedMessage) {
                ((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
            }
            return instant;
        });
}
/**
 * Disposes of the consumer by closing the underlying connection to the service.
 */
@Override
public void close() {
    // getAndSet makes the close idempotent: only the first caller tears anything down.
    if (isDisposed.getAndSet(true)) {
        return;
    }

    logger.info("Removing receiver links.");

    // ConcurrentHashMap.forEach never hands out a null value, so no null check is needed here.
    openConsumers.forEach((linkName, consumer) -> consumer.close());
    openConsumers.clear();

    onClientClose.run();
}
// Returns true when the token is owned by this receiver and still locked; errors when the lock
// has already expired; false when the token is unknown to this receiver.
private Mono<Boolean> isLockTokenValid(String lockToken) {
    final Instant expiration = messageLockContainer.getLockTokenExpiration(lockToken);
    if (expiration == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }

    final Instant now = Instant.now();
    if (!expiration.isBefore(now)) {
        return Mono.just(true);
    }

    return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", expiration, now),
        getErrorContext())));
}
/**
 * Settles a message (complete/abandon/defer/dead-letter) via the management node.
 *
 * @param message Carrier of the lock token to settle.
 * @param dispositionStatus Desired terminal state for the message.
 * @param deadLetterReason Optional dead-letter reason; only meaningful for SUSPENDED.
 * @param deadLetterErrorDescription Optional dead-letter description; only meaningful for SUSPENDED.
 * @param propertiesToModify Optional properties to modify on the message.
 * @return A Mono completing when the service acknowledges the disposition.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {

    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    // Settling requires PEEK_LOCK; in RECEIVE_AND_DELETE the message is gone on receipt.
    // NOTE: the original repeated the null/empty lock-token checks here; they were unreachable
    // (already handled above) and have been removed.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }

    final String lockToken = message.getLockToken();
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);

    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        // Lock is settled; stop tracking it locally.
        messageLockContainer.remove(lockToken);
    }));
}
// Returns the consumer for this link, lazily creating it (and its underlying AMQP receive link)
// on first use. computeIfAbsent guarantees a single consumer per link name.
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName, ReceiveAsyncOptions options) {
    return openConsumers.computeIfAbsent(linkName, name -> {
        logger.info("{}: Creating consumer for link '{}'", entityPath, name);

        final Flux<AmqpReceiveLink> receiveLinks = connectionProcessor
            .flatMap(connection -> connection.createReceiveLink(name, entityPath, receiveMode,
                isSessionEnabled, null, entityType))
            .doOnNext(link -> logger.verbose(
                "Created consumer for Service Bus resource: [{}] mode: [{}]"
                    + " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]",
                link.getEntityPath(), receiveMode, isSessionEnabled, "N/A", entityType))
            .repeat();

        final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, name, null);
        final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
        final ServiceBusReceiveLinkProcessor linkProcessor = receiveLinks.subscribeWith(
            new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor, context));

        // Lock auto-renewal is on only when a non-null, non-zero max duration was requested.
        final boolean autoLockRenewal = options.getMaxAutoRenewDuration() != null
            && !options.getMaxAutoRenewDuration().isZero();

        return new ServiceBusAsyncConsumer(name, linkProcessor, messageSerializer,
            options.isEnableAutoComplete(), autoLockRenewal, options.getMaxAutoRenewDuration(),
            connectionProcessor.getRetryOptions(), messageLockContainer,
            this::complete, this::abandon, this::renewMessageLock);
    });
}
// Builds the AMQP error context (namespace + entity) attached to exceptions from this receiver.
private AmqpErrorContext getErrorContext() {
    final String namespace = getFullyQualifiedNamespace();
    final String entity = getEntityPath();
    return new SessionErrorContext(namespace, entity);
}
} |
getQueueName(). When running the test if you set the right environment variables, you don't need to make these manual changes. | protected void beforeTest() {
// FIX: the queue name was hard-coded to a developer-specific queue ("hemant-test2").
// Resolve it from the test configuration instead so the suite runs against whatever
// environment is configured. (getQueueName() is provided by the test base — see the
// variant of beforeTest below that already uses it.)
final String queueName = getQueueName();
Assertions.assertNotNull(queueName, "'queueName' cannot be null.");

sender = createBuilder().sender().queueName(queueName).buildAsyncClient();

// Receiver that settles messages automatically.
receiver = createBuilder()
    .receiver()
    .queueName(queueName)
    .isAutoComplete(true)
    .buildAsyncClient();

// Receiver for tests that settle messages explicitly.
receiverManualComplete = createBuilder()
    .receiver()
    .queueName(queueName)
    .isAutoComplete(false)
    .buildAsyncClient();

// Receiver where the service deletes messages as soon as they are received.
receiveDeleteModeReceiver = createBuilder()
    .receiver()
    .queueName(queueName)
    .isAutoComplete(false)
    .receiveMode(ReceiveMode.RECEIVE_AND_DELETE)
    .buildAsyncClient();
} | final String queueName = "hemant-test2"; | protected void beforeTest() {
// Queue under test comes from configuration (environment), not a hard-coded name.
final String queueName = getQueueName();
Assertions.assertNotNull(queueName, "'queueName' cannot be null.");

sender = createBuilder().sender().queueName(queueName).buildAsyncClient();
// Default receiver. NOTE(review): no explicit auto-complete setting here — confirm the
// builder default matches the behavior these tests expect.
receiver = createBuilder()
    .receiver()
    .queueName(queueName)
    .buildAsyncClient();
// Receiver used by tests that settle messages explicitly.
receiverManualComplete = createBuilder()
    .receiver()
    .queueName(queueName)
    .buildAsyncClient();
// Receiver where the service deletes messages as soon as they are received.
receiveDeleteModeReceiver = createBuilder()
    .receiver()
    .queueName(queueName)
    .receiveMode(ReceiveMode.RECEIVE_AND_DELETE)
    .buildAsyncClient();
} | class ServiceBusReceiverAsyncClientIntegrationTest extends IntegrationTestBase {
// Payload used by most send/receive assertions in this suite.
private static final String CONTENTS = "Test-contents";

private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClientIntegrationTest.class);

// Clients are created in beforeTest and disposed in afterTest.
private ServiceBusReceiverAsyncClient receiver;
private ServiceBusReceiverAsyncClient receiverManualComplete;
private ServiceBusReceiverAsyncClient receiveDeleteModeReceiver;
private ServiceBusSenderAsyncClient sender;

ServiceBusReceiverAsyncClientIntegrationTest() {
    super(new ClientLogger(ServiceBusReceiverAsyncClientIntegrationTest.class));
}
// FIX: the annotation was duplicated (`@Override` appeared twice), which does not compile —
// Java forbids repeating a non-repeatable annotation on the same element.
@Override
protected void afterTest() {
    // Dispose every client created in beforeTest.
    dispose(receiver, receiverManualComplete, receiveDeleteModeReceiver, sender);
}
/**
 * Verifies that we can send and receive two messages.
 * NOTE(review): despite the "AutoComplete" name this test subscribes to {@code receiverManualComplete} —
 * confirm which receiver it is meant to exercise.
 */
@Disabled("Problem when receiving two messages. Link is closed prematurely.")
@Test
void receiveTwoMessagesAutoComplete() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    // Send the same payload twice, then expect two messages carrying the tracking-id property.
    StepVerifier.create(sender.send(message).then(sender.send(message))
        .thenMany(receiverManualComplete.receive()))
        .assertNext(receivedMessage ->
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .assertNext(receivedMessage ->
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .thenCancel()
        .verify();
}
/**
 * Verifies that we can send and receive a message.
 * NOTE(review): despite the "AutoComplete" name this test subscribes to {@code receiverManualComplete} —
 * confirm which receiver it is meant to exercise.
 */
@Test
void receiveMessageAutoComplete() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    // Send one message and expect it back with the tracking-id property set.
    StepVerifier.create(sender.send(message).thenMany(receiverManualComplete.receive()))
        .assertNext(receivedMessage ->
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .thenCancel()
        .verify();
}
/**
 * Verifies that we can send a message and then peek it without settling it.
 */
@Test
void peekMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);

    final Mono<ServiceBusReceivedMessage> sendThenPeek = sender.send(message).then(receiver.peek());

    StepVerifier.create(sendThenPeek)
        .assertNext(peeked ->
            Assertions.assertTrue(peeked.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .verifyComplete();
}
/**
 * Verifies that we can schedule a message for a future enqueue time and then receive it.
 * NOTE(review): "Sceduled" in the method name is a typo for "Scheduled"; left as-is to avoid
 * renaming a test entry point.
 */
@Test
void testSendSceduledMessageAndReceive() {
    final String messageId = UUID.randomUUID().toString();
    final String contents = "Some-contents";
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(contents, messageId, 0);
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(2);
    // Schedule 2 seconds out, then wait 3 seconds before subscribing so the message is available.
    sender.scheduleMessage(message, scheduledEnqueueTime)
        .delaySubscription(Duration.ofSeconds(3))
        .block();
    StepVerifier.create(receiver.receive().take(1))
        .assertNext(receivedMessage -> {
            Assertions.assertArrayEquals(contents.getBytes(), receivedMessage.getBody());
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
            Assertions.assertEquals(messageId, receivedMessage.getProperties().get(MESSAGE_TRACKING_ID));
        })
        .verifyComplete();
}
/**
 * Verifies that we can schedule two messages for the same enqueue time and receive both, in order.
 */
@Test
void testSendMultipleSceduledMessageAndReceive() {
    final String messageId1 = UUID.randomUUID().toString();
    final String messageId2 = UUID.randomUUID().toString();
    // Declared final so the lambdas below can capture it directly (the original copied it into
    // a second "finalContents" variable for the same purpose).
    final String contents = "Some-contents";
    final ServiceBusMessage message1 = TestUtils.getServiceBusMessage(contents, messageId1, 0);
    final ServiceBusMessage message2 = TestUtils.getServiceBusMessage(contents, messageId2, 0);
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(1);

    sender.scheduleMessage(message1, scheduledEnqueueTime).block();
    sender.scheduleMessage(message2, scheduledEnqueueTime).block(Duration.ofSeconds(4));

    StepVerifier.create(receiveDeleteModeReceiver.receive().take(2))
        .assertNext(receivedMessage -> {
            Assertions.assertArrayEquals(contents.getBytes(), receivedMessage.getBody());
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
            Assertions.assertEquals(messageId1, receivedMessage.getProperties().get(MESSAGE_TRACKING_ID));
        })
        .assertNext(receivedMessage -> {
            Assertions.assertArrayEquals(contents.getBytes(), receivedMessage.getBody());
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
            Assertions.assertEquals(messageId2, receivedMessage.getProperties().get(MESSAGE_TRACKING_ID));
        })
        .verifyComplete();
}
/**
 * Verifies that we can cancel a scheduled message: after cancellation, nothing is received.
 */
@Test
void testSendSceduledMessageAndCancel() {
    final String messageId = UUID.randomUUID().toString();
    final String contents = "Some-contents";
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(contents, messageId, 0);
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(10);
    final Duration delayDuration = Duration.ofSeconds(3);

    final Long sequenceNumber = sender.scheduleMessage(message, scheduledEnqueueTime).block();
    // FIX: guard the unboxing below — block() may yield null, which previously surfaced as an
    // unhelpful NullPointerException on sequenceNumber.longValue().
    Assertions.assertNotNull(sequenceNumber, "Scheduling should have produced a sequence number.");
    logger.verbose("Scheduled the message, sequence number {}.", sequenceNumber);

    Mono.delay(delayDuration)
        .then(sender.cancelScheduledMessage(sequenceNumber))
        .block();
    logger.verbose("Cancelled the scheduled message, sequence number {}.", sequenceNumber);

    // The scheduled message was cancelled, so nothing should arrive within the window.
    StepVerifier.create(receiver.receive().take(1))
        .expectNoEvent(Duration.ofSeconds(5))
        .verifyComplete();
}
/**
 * Verifies that a sent message can be peeked starting at a given sequence number.
 */
@Test
void peekFromSequenceNumberMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    final long fromSequenceNumber = 1;

    StepVerifier.create(sender.send(message).then(receiver.peekAt(fromSequenceNumber)))
        .assertNext(peeked ->
            Assertions.assertTrue(peeked.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .verifyComplete();
}
/**
 * Verifies that a batch of sent messages can be peeked.
 */
@Test
void peekBatchMessages() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    final int maxMessages = 2;

    // Send two copies concurrently, then peek up to maxMessages of them.
    StepVerifier.create(Mono.when(sender.send(message), sender.send(message))
            .thenMany(receiver.peekBatch(maxMessages)))
        .expectNextCount(maxMessages)
        .verifyComplete();
}
/**
 * Verifies that a batch of messages can be peeked starting from a given sequence number.
 */
@Test
void peekBatchMessagesFromSequence() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    final int maxMessages = 2;
    final int fromSequenceNumber = 1;

    // Send two copies concurrently, then peek from the given sequence number.
    StepVerifier.create(Mono.when(sender.send(message), sender.send(message))
            .thenMany(receiver.peekBatchAt(maxMessages, fromSequenceNumber)))
        .expectNextCount(maxMessages)
        .verifyComplete();
}
/**
 * Verifies that a received message can be dead-lettered.
 */
@Test
void deadLetterMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);

    // Send, then take the first delivery on the manual-settlement receiver.
    final ServiceBusReceivedMessage received = sender.send(message)
        .then(receiverManualComplete.receive().next())
        .block(Duration.ofSeconds(30));
    Assertions.assertNotNull(received);

    StepVerifier.create(receiverManualComplete.deadLetter(received))
        .verifyComplete();
}
/**
 * Verifies that we can renew message lock.
 * Receives one message, captures its initial lock expiry, waits 10 seconds, renews the
 * lock, and asserts the new expiry is strictly later than the initial one.
 */
@Test
void testBasicReceiveAndRenewLock() {
// Arrange: references capture the received message and its initial lock expiry from inside the pipeline.
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, "id-1", 0);
final AtomicReference<ServiceBusReceivedMessage> receivedMessage = new AtomicReference<>();
final AtomicReference<Instant> initialLock = new AtomicReference<>();
sender.send(message).block(Duration.ofSeconds(20));
// Act: receive one message, record its lock, then after a delay renew the lock.
StepVerifier.create(
receiverManualComplete.receive().take(1).map(m -> {
Assertions.assertNotNull(m.getLockedUntil());
receivedMessage.set(m);
initialLock.set(m.getLockedUntil());
return m;
}).then(Mono.delay(Duration.ofSeconds(10))
// Mono.defer so renewMessageLock sees the message set by the map() above.
.then(Mono.defer(() -> receiverManualComplete.renewMessageLock(receivedMessage.get())))))
// Assert: renewed expiry is after the initial one and matches the message's updated lock.
.assertNext(lockedUntil -> {
Assertions.assertTrue(lockedUntil.isAfter(initialLock.get()),
String.format("Updated lock is not after the initial Lock. updated: [%s]. initial:[%s]",
lockedUntil, initialLock.get()));
Assertions.assertEquals(receivedMessage.get().getLockedUntil(), lockedUntil);
})
.verifyComplete();
}
/**
 * Verifies that the lock is automatically renewed while a message is held longer than its
 * original lock duration: {@code lockedUntil} keeps moving forward while we sleep.
 */
@Test
void autoRenewLockOnReceiveMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = getServiceBusMessage(CONTENTS, messageId, 0);
    sender.send(message).block(TIMEOUT);

    // Dedicated receiver with auto lock renewal enabled and a short max renewal duration.
    final ServiceBusReceiverAsyncClient receiver = new ServiceBusClientBuilder()
        .connectionString(getConnectionString())
        .receiver()
        .receiveMode(ReceiveMode.PEEK_LOCK)
        .isLockAutoRenewed(true)
        .queueName(getQueueName())
        .maxAutoLockRenewalDuration(Duration.ofSeconds(2))
        .buildAsyncClient();

    try {
        StepVerifier.create(receiver.receive())
            .assertNext(received -> {
                Assertions.assertNotNull(received.getLockedUntil());
                Assertions.assertNotNull(received.getLockToken());
                logger.info("{}: lockId[{}]. lockedUntil[{}]",
                    received.getSequenceNumber(), received.getLockToken(), received.getLockedUntil());
                final Instant initial = received.getLockedUntil();
                Instant latest = Instant.MIN;
                // Hold the message past the original lock duration; renewal should keep extending it.
                for (int i = 0; i < 3; i++) {
                    try {
                        TimeUnit.SECONDS.sleep(15);
                    } catch (InterruptedException error) {
                        // Restore the interrupt status so the interruption is not silently swallowed.
                        Thread.currentThread().interrupt();
                        logger.error("Error occurred while sleeping: " + error);
                    }
                    Assertions.assertNotNull(received.getLockedUntil());
                    latest = received.getLockedUntil();
                }
                Assertions.assertTrue(initial.isBefore(latest),
                    String.format("Latest should be after initial. initial: %s. latest: %s", initial, latest));
                logger.info("Completing message.");
                receiver.complete(received).block(Duration.ofSeconds(15));
            })
            .thenCancel()
            .verify();
    } finally {
        // Always release the locally-built receiver.
        receiver.close();
    }
}
/**
 * Verifies that a binary-payload message is received in receive-and-delete mode and only once.
 */
@Test
void testBasicReceiveAndDeleteWithBinaryData() {
    final String trackingId = UUID.randomUUID().toString();
    final ServiceBusMessage binaryMessage = TestUtils.getServiceBusMessageBinary(trackingId, 0, 64);

    StepVerifier.create(sender.send(binaryMessage).thenMany(receiveDeleteModeReceiver.receive()))
        .assertNext(received ->
            Assertions.assertTrue(received.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .expectNoEvent(Duration.ofSeconds(2))
        .thenCancel()
        .verify();
}
/**
 * Verifies that a large (64 KiB) binary payload is received in receive-and-delete mode and only once.
 */
@Test
void testBasicReceiveAndCompleteWithLargeBinaryData() {
    final String trackingId = UUID.randomUUID().toString();
    final ServiceBusMessage largeMessage = TestUtils.getServiceBusMessageBinary(trackingId, 0, 64 * 1024);

    StepVerifier.create(sender.send(largeMessage).thenMany(receiveDeleteModeReceiver.receive()))
        .assertNext(received ->
            Assertions.assertTrue(received.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .expectNoEvent(Duration.ofSeconds(2))
        .thenCancel()
        .verify();
}
/**
 * Verifies that a received message can be completed explicitly.
 */
@Test
void testBasicReceiveAndComplete() {
    final String trackingId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessageBinary(trackingId, 0, 64 * 1024);

    final ServiceBusReceivedMessage received = sender.send(message)
        .then(receiverManualComplete.receive().next())
        .block(Duration.ofSeconds(30));
    Assertions.assertNotNull(received);

    StepVerifier.create(receiverManualComplete.complete(received))
        .verifyComplete();
}
/**
 * Verifies that a received message retains its tracking-id property and can be completed.
 */
@Test
void testBasicReceiveAndCompleteMessageWithProperties() {
    final String trackingId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessageBinary(trackingId, 0, 64 * 1024);

    final ServiceBusReceivedMessage received = sender.send(message)
        .then(receiverManualComplete.receive().next())
        .block(Duration.ofSeconds(30));
    Assertions.assertNotNull(received);
    Assertions.assertTrue(received.getProperties().containsKey(MESSAGE_TRACKING_ID));

    StepVerifier.create(receiverManualComplete.complete(received))
        .verifyComplete();
}
/**
 * Verifies that a received message can be abandoned.
 */
@Test
void testBasicReceiveAndAbandon() {
    final String trackingId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessageBinary(trackingId, 0, 64 * 1024);

    final ServiceBusReceivedMessage received = sender.send(message)
        .then(receiverManualComplete.receive().next())
        .block(Duration.ofSeconds(30));
    Assertions.assertNotNull(received);

    StepVerifier.create(receiverManualComplete.abandon(received))
        .verifyComplete();
}
/**
 * Verifies that a received binary message can be dead-lettered.
 */
@Test
void testBasicReceiveAndDeadLetter() {
    final String trackingId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessageBinary(trackingId, 0, 64 * 1024);

    final ServiceBusReceivedMessage received = sender.send(message)
        .then(receiverManualComplete.receive().next())
        .block(Duration.ofSeconds(30));
    Assertions.assertNotNull(received);

    StepVerifier.create(receiverManualComplete.deadLetter(received))
        .verifyComplete();
}
/**
 * Verifies that a deferred message can be fetched by sequence number and then dead-lettered,
 * after which it is no longer retrievable by that sequence number.
 */
@Test
void testReceiveBySequenceNumberAndDeadletter() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessageBinary(messageTrackingId, 0, 64 * 1024);
// Send and take the first delivery.
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive().next())
.block(Duration.ofSeconds(30));
Assertions.assertNotNull(receivedMessage);
// Defer, then fetch the same message back by sequence number.
receiverManualComplete.defer(receivedMessage).block(Duration.ofSeconds(30));
final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
.receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(Duration.ofSeconds(30));
Assertions.assertNotNull(receivedDeferredMessage);
Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
// Dead-letter the deferred message; a second fetch by sequence number should yield nothing.
receiverManualComplete.deadLetter(receivedDeferredMessage).block(Duration.ofSeconds(30));
StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
.expectNextCount(0)
.thenCancel()
.verify(Duration.ofSeconds(2));
}
/**
 * Verifies that a deferred message can be fetched by sequence number and then abandoned,
 * after which it is no longer retrievable by that sequence number.
 */
@Test
void testReceiveBySequenceNumberAndAbandon() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessageBinary(messageTrackingId, 0, 64 * 1024);
// Send and take the first delivery.
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive().next())
.block(Duration.ofSeconds(30));
Assertions.assertNotNull(receivedMessage);
// Defer, then fetch the same message back by sequence number.
receiverManualComplete.defer(receivedMessage).block(Duration.ofSeconds(30));
final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
.receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(Duration.ofSeconds(30));
Assertions.assertNotNull(receivedDeferredMessage);
Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
// Abandon the deferred message; a second fetch by sequence number should yield nothing.
receiverManualComplete.abandon(receivedDeferredMessage).block(Duration.ofSeconds(30));
StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
.expectNextCount(0)
.thenCancel()
.verify(Duration.ofSeconds(2));
}
/**
 * Verifies that application properties of assorted AMQP-supported types round-trip through
 * send and receive unchanged (receive-and-delete mode).
 */
@Test
void testSendReceiveMessageWithVariousPropertyTypes() {
    final String messageTrackingId = UUID.randomUUID().toString();
    final ServiceBusMessage messageToSend = TestUtils.getServiceBusMessageBinary(messageTrackingId, 0, 64 * 1024);

    Map<String, Object> sentProperties = messageToSend.getProperties();
    sentProperties.put("NullProperty", null);
    sentProperties.put("BooleanProperty", true);
    sentProperties.put("ByteProperty", (byte) 1);
    sentProperties.put("ShortProperty", (short) 2);
    sentProperties.put("IntProperty", 3);
    sentProperties.put("LongProperty", 4L);
    sentProperties.put("FloatProperty", 5.5f);
    // Fixed: was 6.6f (a boxed Float), which never exercised the double property type.
    sentProperties.put("DoubleProperty", 6.6);
    sentProperties.put("CharProperty", 'z');
    sentProperties.put("UUIDProperty", UUID.randomUUID());
    sentProperties.put("StringProperty", "string");

    sender.send(messageToSend).block(Duration.ofSeconds(30));

    // Assert every sent property comes back equal (array-aware comparison for array values).
    StepVerifier.create(receiveDeleteModeReceiver.receive())
        .assertNext(receivedMessage -> {
            Map<String, Object> receivedProperties = receivedMessage.getProperties();
            for (Map.Entry<String, Object> sentEntry : sentProperties.entrySet()) {
                if (sentEntry.getValue() != null && sentEntry.getValue().getClass().isArray()) {
                    Assertions.assertArrayEquals((Object[]) sentEntry.getValue(),
                        (Object[]) receivedProperties.get(sentEntry.getKey()));
                } else {
                    Assertions.assertEquals(sentEntry.getValue(), receivedProperties.get(sentEntry.getKey()));
                }
            }
        })
        .expectNoEvent(Duration.ofSeconds(2))
        .thenCancel()
        .verify();
}
/**
 * Verifies that a deferred message can be fetched by sequence number and then completed,
 * after which it is no longer retrievable by that sequence number.
 */
@Test
void testReceiveBySequenceNumberAndComplete() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessageBinary(messageTrackingId, 0, 64 * 1024);
// Send and take the first delivery.
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive().next())
.block(Duration.ofSeconds(30));
Assertions.assertNotNull(receivedMessage);
// Defer, then fetch the same message back by sequence number.
receiverManualComplete.defer(receivedMessage).block(Duration.ofSeconds(30));
final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
.receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(Duration.ofSeconds(30));
Assertions.assertNotNull(receivedDeferredMessage);
Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
// Complete the deferred message; a second fetch by sequence number should yield nothing.
receiverManualComplete.complete(receivedDeferredMessage).block(Duration.ofSeconds(30));
StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
.expectNextCount(0)
.thenCancel()
.verify(Duration.ofSeconds(2));
}
/**
 * Verifies that when multiple messages are scheduled and one is cancelled, only the
 * remaining scheduled message is delivered.
 */
@Test
void testSendMultipleSceduledMessageAndCancel() {
    final String trackingId1 = UUID.randomUUID().toString();
    final String trackingId2 = UUID.randomUUID().toString();
    final String contents = "Some-contents";
    final ServiceBusMessage message1 = TestUtils.getServiceBusMessage(contents, trackingId1, 0);
    final ServiceBusMessage message2 = TestUtils.getServiceBusMessage(contents, trackingId2, 0);
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(10);
    final Duration subscriptionDelayDuration = Duration.ofSeconds(3);

    sender.scheduleMessage(message1, scheduledEnqueueTime).block();
    final Long sequenceNumber2 = sender.scheduleMessage(message2, scheduledEnqueueTime).block();
    // Fail with a clear assertion instead of an NPE on longValue() if scheduling yields no sequence number.
    Assertions.assertNotNull(sequenceNumber2);
    sender.cancelScheduledMessage(sequenceNumber2).block();

    // Only the first (uncancelled) message should arrive.
    StepVerifier.create(receiveDeleteModeReceiver.receive().delaySubscription(subscriptionDelayDuration).take(1))
        .assertNext(receivedMessage ->
            Assertions.assertEquals(receivedMessage.getProperties().get(MESSAGE_TRACKING_ID).toString(), trackingId1))
        .thenCancel()
        .verify();
}
} | class ServiceBusReceiverAsyncClientIntegrationTest extends IntegrationTestBase {
// Payload used by most tests in this class.
private static final String CONTENTS = "Test-contents";
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClientIntegrationTest.class);
// Receiver with default settlement behavior.
private ServiceBusReceiverAsyncClient receiver;
// Receiver where the test settles (complete/abandon/defer/dead-letter) messages itself.
private ServiceBusReceiverAsyncClient receiverManualComplete;
// Receiver configured for RECEIVE_AND_DELETE mode.
private ServiceBusReceiverAsyncClient receiveDeleteModeReceiver;
private ServiceBusSenderAsyncClient sender;
ServiceBusReceiverAsyncClientIntegrationTest() {
super(new ClientLogger(ServiceBusReceiverAsyncClientIntegrationTest.class));
}
@Override
protected void afterTest() {
    // Fixed: the annotation was duplicated (`@Override` twice), which does not compile —
    // @Override is not a repeatable annotation.
    // Release every client created for the test so connections are closed between tests.
    dispose(receiver, receiverManualComplete, receiveDeleteModeReceiver, sender);
}
/**
 * Verifies that we can send and receive two messages.
 * Currently disabled: receiving the second message closes the link prematurely (see @Disabled note).
 */
@Disabled("Problem when receiving two messages. Link is closed prematurely.")
@Test
void receiveTwoMessagesAutoComplete() {
final String messageId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
// Auto-complete is turned off so the test observes raw deliveries.
final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
// Send the same payload twice sequentially, then expect two deliveries.
StepVerifier.create(sender.send(message).then(sender.send(message))
.thenMany(receiver.receive(options)))
.assertNext(receivedMessage ->
Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
.assertNext(receivedMessage ->
Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
.thenCancel()
.verify();
}
/**
 * Verifies that a sent message is received when auto-complete is disabled via options.
 */
@Test
void receiveMessageAutoComplete() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);

    StepVerifier.create(sender.send(message).thenMany(receiver.receive(options)))
        .assertNext(received ->
            Assertions.assertTrue(received.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .thenCancel()
        .verify();
}
/**
 * Verifies that a sent message can be peeked without settling it.
 */
@Test
void peekMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);

    StepVerifier.create(sender.send(message).then(receiver.peek()))
        .assertNext(peeked ->
            Assertions.assertTrue(peeked.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .verifyComplete();
}
/**
 * Verifies that a message scheduled a couple of seconds out is eventually received.
 */
@Test
void sendScheduledMessageAndReceive() {
    final String messageId = UUID.randomUUID().toString();
    final String payload = "Some-contents";
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(payload, messageId, 0);
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(2);

    // Schedule the message, delaying subscription until past the enqueue time.
    sender.scheduleMessage(message, scheduledEnqueueTime)
        .delaySubscription(Duration.ofSeconds(3))
        .block();

    StepVerifier.create(receiver.receive().take(1))
        .assertNext(received -> {
            Assertions.assertArrayEquals(payload.getBytes(), received.getBody());
            Assertions.assertTrue(received.getProperties().containsKey(MESSAGE_TRACKING_ID));
        })
        .verifyComplete();
}
/**
 * Verifies that we can schedule and receive multiple messages.
 * Two messages share one enqueue time; both are expected in scheduling order.
 */
@Test
void sendMultipleScheduledMessageAndReceive() {
final String messageId1 = UUID.randomUUID().toString();
final String messageId2 = UUID.randomUUID().toString();
String contents = "Some-contents";
final ServiceBusMessage message1 = TestUtils.getServiceBusMessage(contents, messageId1, 0);
final ServiceBusMessage message2 = TestUtils.getServiceBusMessage(contents, messageId2, 0);
final Instant scheduledEnqueueTime = Instant.now().plusSeconds(1);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
// Schedule both messages, bounding each call by the shared test timeout.
sender.scheduleMessage(message1, scheduledEnqueueTime)
.block(TIMEOUT);
sender.scheduleMessage(message2, scheduledEnqueueTime)
.block(TIMEOUT);
// Effectively-final copy for use inside the lambdas below.
String finalContents = contents;
StepVerifier.create(receiveDeleteModeReceiver.receive(options).take(2))
.assertNext(receivedMessage -> {
Assertions.assertArrayEquals(finalContents.getBytes(), receivedMessage.getBody());
Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
})
.assertNext(receivedMessage -> {
Assertions.assertArrayEquals(finalContents.getBytes(), receivedMessage.getBody());
Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
})
.verifyComplete();
}
/**
 * Verifies that a scheduled message can be cancelled before its enqueue time, so it is never delivered.
 */
@Test
void cancelScheduledMessage() {
    final String messageId = UUID.randomUUID().toString();
    final String contents = "Some-contents";
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(contents, messageId, 0);
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(10);
    final Duration delayDuration = Duration.ofSeconds(3);

    // Bound the blocking calls with the shared test timeout, consistent with the rest of this class,
    // so a hung service call fails the test instead of hanging it indefinitely.
    final Long sequenceNumber = sender.scheduleMessage(message, scheduledEnqueueTime).block(TIMEOUT);
    logger.verbose("Scheduled the message, sequence number {}.", sequenceNumber);
    Assertions.assertNotNull(sequenceNumber);

    Mono.delay(delayDuration)
        .then(sender.cancelScheduledMessage(sequenceNumber))
        .block(TIMEOUT);
    logger.verbose("Cancelled the scheduled message, sequence number {}.", sequenceNumber);

    // The message was cancelled, so no delivery is expected within the window.
    StepVerifier.create(receiver.receive().take(1))
        .expectNoEvent(Duration.ofSeconds(5))
        .verifyComplete();
}
/**
 * Verifies that a sent message can be peeked starting at a given sequence number.
 */
@Test
void peekFromSequenceNumberMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    final long fromSequenceNumber = 1;

    StepVerifier.create(sender.send(message).then(receiver.peekAt(fromSequenceNumber)))
        .assertNext(peeked ->
            Assertions.assertTrue(peeked.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .verifyComplete();
}
/**
 * Verifies that a batch of sent messages can be peeked.
 */
@Test
void peekBatchMessages() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    final int maxMessages = 2;

    // Send two copies concurrently, then peek up to maxMessages of them.
    StepVerifier.create(Mono.when(sender.send(message), sender.send(message))
            .thenMany(receiver.peekBatch(maxMessages)))
        .expectNextCount(maxMessages)
        .verifyComplete();
}
/**
 * Verifies that a batch of messages can be peeked starting from a given sequence number.
 */
@Test
void peekBatchMessagesFromSequence() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    final int maxMessages = 2;
    final int fromSequenceNumber = 1;

    // Send two copies concurrently, then peek from the given sequence number.
    StepVerifier.create(Mono.when(sender.send(message), sender.send(message))
            .thenMany(receiver.peekBatchAt(maxMessages, fromSequenceNumber)))
        .expectNextCount(maxMessages)
        .verifyComplete();
}
/**
 * Verifies that a received message (auto-complete disabled) can be dead-lettered.
 */
@Test
void deadLetterMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);

    // Send, then take the first raw (unsettled) delivery.
    final ServiceBusReceivedMessage received = sender.send(message)
        .then(receiver.receive(options).next())
        .block(Duration.ofSeconds(30));
    Assertions.assertNotNull(received);

    StepVerifier.create(receiver.deadLetter(received))
        .verifyComplete();
}
/**
 * Verifies that we can renew message lock.
 * Receives one message synchronously, records its initial lock expiry, waits 10 seconds,
 * renews the lock, and asserts the new expiry is strictly later.
 */
@Test
void receiveAndRenewLock() {
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, "id-1", 0);
final AtomicReference<ServiceBusReceivedMessage> receivedMessage = new AtomicReference<>();
final AtomicReference<Instant> initialLock = new AtomicReference<>();
// Manual settlement and no auto renewal, so the lock only moves when we renew it explicitly.
final ReceiveAsyncOptions options = new ReceiveAsyncOptions()
.setEnableAutoComplete(false)
.setMaxAutoRenewDuration(null);
sender.send(message).block(TIMEOUT);
ServiceBusReceivedMessage m = receiver.receive(options).next().block(TIMEOUT);
Assertions.assertNotNull(m);
Assertions.assertNotNull(m.getLockedUntil());
receivedMessage.set(m);
initialLock.set(m.getLockedUntil());
// After a delay, renew the lock and check the expiry advanced.
StepVerifier.create(Mono.delay(Duration.ofSeconds(10))
.then(Mono.defer(() -> receiver.renewMessageLock(receivedMessage.get()))))
.assertNext(lockedUntil -> {
Assertions.assertTrue(lockedUntil.isAfter(initialLock.get()),
String.format("Updated lock is not after the initial Lock. updated: [%s]. initial:[%s]",
lockedUntil, initialLock.get()));
Assertions.assertEquals(receivedMessage.get().getLockedUntil(), lockedUntil);
})
.verifyComplete();
}
/**
 * Verifies that the lock is automatically renewed while a message is held longer than its
 * original lock duration: {@code lockedUntil} keeps moving forward while we sleep.
 */
@Test
void autoRenewLockOnReceiveMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = getServiceBusMessage(CONTENTS, messageId, 0);
    // Auto-complete on; renewal bounded by a short max auto-renew duration.
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions()
        .setEnableAutoComplete(true)
        .setMaxAutoRenewDuration(Duration.ofSeconds(2));
    sender.send(message).block(TIMEOUT);

    final ServiceBusReceiverAsyncClient receiver = new ServiceBusClientBuilder()
        .connectionString(getConnectionString())
        .receiver()
        .receiveMode(ReceiveMode.PEEK_LOCK)
        .queueName(getQueueName())
        .buildAsyncClient();

    try {
        StepVerifier.create(receiver.receive(options))
            .assertNext(received -> {
                Assertions.assertNotNull(received.getLockedUntil());
                Assertions.assertNotNull(received.getLockToken());
                logger.info("{}: lockId[{}]. lockedUntil[{}]",
                    received.getSequenceNumber(), received.getLockToken(), received.getLockedUntil());
                final Instant initial = received.getLockedUntil();
                Instant latest = Instant.MIN;
                // Hold the message past the original lock duration; renewal should keep extending it.
                for (int i = 0; i < 3; i++) {
                    try {
                        TimeUnit.SECONDS.sleep(15);
                    } catch (InterruptedException error) {
                        // Restore the interrupt status so the interruption is not silently swallowed.
                        Thread.currentThread().interrupt();
                        logger.error("Error occurred while sleeping: " + error);
                    }
                    Assertions.assertNotNull(received.getLockedUntil());
                    latest = received.getLockedUntil();
                }
                Assertions.assertTrue(initial.isBefore(latest),
                    String.format("Latest should be after initial. initial: %s. latest: %s", initial, latest));
                logger.info("Completing message.");
                receiver.complete(received).block(Duration.ofSeconds(15));
            })
            .thenCancel()
            .verify();
    } finally {
        // Always release the locally-built receiver.
        receiver.close();
    }
}
/**
 * Verifies that a message is received exactly once in receive-and-delete mode.
 * NOTE(review): despite the name, this sends a text payload via getServiceBusMessage — confirm intent.
 */
@Test
void receiveAndDeleteWithBinaryData() {
    final String trackingId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, trackingId, 0);
    final Duration quietWindow = Duration.ofSeconds(2);
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);

    StepVerifier.create(sender.send(message).thenMany(receiveDeleteModeReceiver.receive(options)))
        .assertNext(received ->
            Assertions.assertTrue(received.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .expectNoEvent(quietWindow)
        .thenCancel()
        .verify();
}
/**
 * Verifies that a received message can be abandoned.
 */
@Test
void receiveAndAbandon() {
    final String trackingId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, trackingId, 0);

    final ServiceBusReceivedMessage received = sender.send(message)
        .then(receiverManualComplete.receive().next())
        .block(TIMEOUT);
    Assertions.assertNotNull(received);

    StepVerifier.create(receiverManualComplete.abandon(received))
        .verifyComplete();
}
/**
 * Verifies that a deferred message can be fetched by sequence number and then dead-lettered,
 * after which it is no longer retrievable by that sequence number.
 */
@Test
void receiveBySequenceNumberAndDeadletter() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
final Duration timeout = Duration.ofSeconds(2);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
// Send and take the first raw (unsettled) delivery.
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive(options).next())
.block(TIMEOUT)
Assertions.assertNotNull(receivedMessage);
// Defer, then fetch the same message back by sequence number.
receiverManualComplete.defer(receivedMessage).block(TIMEOUT);
final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
.receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
Assertions.assertNotNull(receivedDeferredMessage);
Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
// Dead-letter the deferred message; a second fetch by sequence number should yield nothing.
receiverManualComplete.deadLetter(receivedDeferredMessage).block(TIMEOUT);
StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
.expectNextCount(0)
.thenCancel()
.verify(timeout);
}
/**
 * Verifies that a deferred message can be fetched by sequence number and then abandoned,
 * after which it is no longer retrievable by that sequence number.
 */
@Test
void receiveBySequenceNumberAndAbandon() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
final Duration timeout = Duration.ofSeconds(2);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
// Send and take the first raw (unsettled) delivery.
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive(options).next())
.block(TIMEOUT)
Assertions.assertNotNull(receivedMessage);
// Defer, then fetch the same message back by sequence number.
receiverManualComplete.defer(receivedMessage).block(TIMEOUT);
final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
.receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
Assertions.assertNotNull(receivedDeferredMessage);
Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
// Abandon the deferred message; a second fetch by sequence number should yield nothing.
receiverManualComplete.abandon(receivedDeferredMessage).block(TIMEOUT);
StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
.expectNextCount(0)
.thenCancel()
.verify(timeout);
}
/**
 * Verifies that application properties of assorted AMQP-supported types round-trip through
 * send and receive unchanged (receive-and-delete mode, auto-complete disabled).
 */
@Test
void sendReceiveMessageWithVariousPropertyTypes() {
    final String messageTrackingId = UUID.randomUUID().toString();
    final ServiceBusMessage messageToSend = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
    final Duration timeout = Duration.ofSeconds(2);
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);

    Map<String, Object> sentProperties = messageToSend.getProperties();
    sentProperties.put("NullProperty", null);
    sentProperties.put("BooleanProperty", true);
    sentProperties.put("ByteProperty", (byte) 1);
    sentProperties.put("ShortProperty", (short) 2);
    sentProperties.put("IntProperty", 3);
    sentProperties.put("LongProperty", 4L);
    sentProperties.put("FloatProperty", 5.5f);
    // Fixed: was 6.6f (a boxed Float), which never exercised the double property type.
    sentProperties.put("DoubleProperty", 6.6);
    sentProperties.put("CharProperty", 'z');
    sentProperties.put("UUIDProperty", UUID.randomUUID());
    sentProperties.put("StringProperty", "string");

    sender.send(messageToSend).block(TIMEOUT);

    // Assert every sent property comes back equal (array-aware comparison for array values).
    StepVerifier.create(receiveDeleteModeReceiver.receive(options))
        .assertNext(receivedMessage -> {
            Map<String, Object> receivedProperties = receivedMessage.getProperties();
            for (Map.Entry<String, Object> sentEntry : sentProperties.entrySet()) {
                if (sentEntry.getValue() != null && sentEntry.getValue().getClass().isArray()) {
                    Assertions.assertArrayEquals((Object[]) sentEntry.getValue(),
                        (Object[]) receivedProperties.get(sentEntry.getKey()));
                } else {
                    Assertions.assertEquals(sentEntry.getValue(), receivedProperties.get(sentEntry.getKey()));
                }
            }
        })
        .expectNoEvent(timeout)
        .thenCancel()
        .verify();
}
/**
 * Verifies that a deferred message can be fetched by sequence number and then completed,
 * after which it is no longer retrievable by that sequence number.
 */
@Test
void receiveBySequenceNumberAndComplete() {
    final String messageTrackingId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
    final Duration timeout = Duration.ofSeconds(2);
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);

    // Send and take the first raw (unsettled) delivery.
    final ServiceBusReceivedMessage receivedMessage = sender.send(message)
        .then(receiverManualComplete.receive(options).next())
        .block(TIMEOUT);
    Assertions.assertNotNull(receivedMessage);

    // Defer, then fetch the same message back by sequence number.
    // Fixed: use the shared TIMEOUT here too instead of a stray hard-coded Duration.ofSeconds(30).
    receiverManualComplete.defer(receivedMessage).block(TIMEOUT);
    final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
        .receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
    Assertions.assertNotNull(receivedDeferredMessage);
    Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());

    // Complete the deferred message; a second fetch by sequence number should yield nothing.
    receiverManualComplete.complete(receivedDeferredMessage).block(TIMEOUT);
    StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
        .expectNextCount(0)
        .thenCancel()
        .verify(timeout);
}
} |
String.matches() way of matching regex to String has a good perf cost. String matches forces the input regex to always first get compiled via a Pattern.compile() and doing it on every call comes with a perf cost. There is an alternative to String.matches and retain the perf, updated with that logic. | private boolean isRefreshTokenString(String str) {
for (int i = 0; i < str.length(); i++) {
char ch = str.charAt(i);
if ((ch < '0' || ch > '9') && (ch < 'A' || ch > 'Z') && (ch < 'a' || ch > 'z')
&& ch != '_' && ch != '-' && ch != '.') {
return false;
}
}
return true;
} | return true; | private boolean isRefreshTokenString(String str) {
return REFRESH_TOKEN_PATTERN.matcher(str).matches();
} | class VisualStudioCacheAccessor {
// Error message used when the running OS is not recognized as Windows, macOS, or Linux.
private static final String PLATFORM_NOT_SUPPORTED_ERROR = "Platform could not be determined for VS Code"
+ " credential authentication.";
private final ClientLogger logger = new ClientLogger(VisualStudioCacheAccessor.class);
/**
 * Creates an instance of {@link VisualStudioCacheAccessor}
 */
public VisualStudioCacheAccessor() { }
private JsonNode getUserSettings() {
JsonNode output = null;
String homeDir = System.getProperty("user.home");
String settingsPath = "";
ObjectMapper mapper = new ObjectMapper();
try {
if (Platform.isWindows()) {
settingsPath = Paths.get(System.getenv("APPDATA"), "Code", "User", "settings.json")
.toString();
} else if (Platform.isMac()) {
settingsPath = Paths.get(homeDir, "Library",
"Application Support", "Code", "User", "settings.json").toString();
} else if (Platform.isLinux()) {
settingsPath = Paths.get(homeDir, ".config", "Code", "User", "settings.json")
.toString();
} else {
throw logger.logExceptionAsError(
new CredentialUnavailableException(PLATFORM_NOT_SUPPORTED_ERROR));
}
File settingsFile = new File(settingsPath);
output = mapper.readTree(settingsFile);
} catch (Exception e) {
return null;
}
return output;
}
/**
* Get the user configured settings of Visual Studio code.
*
* @param tenantId the user specified tenant id.
* @return a Map containing Vs Code user settings
*/
public Map<String, String> getUserSettingsDetails(String tenantId) {
JsonNode userSettings = getUserSettings();
Map<String, String> details = new HashMap<>();
String tenant = tenantId;
String cloud = "Azure";
if (!userSettings.isNull()) {
if (userSettings.has("azure.tenant") && CoreUtils.isNullOrEmpty(tenant)) {
tenant = userSettings.get("azure.tenant").asText();
}
if (userSettings.has("azure.cloud")) {
cloud = userSettings.get("azure.cloud").asText();
}
}
details.put("tenant", tenant);
details.put("cloud", cloud);
return details;
}
/**
* Get the credential for the specified service and account name.
*
* @param serviceName the name of the service to lookup.
* @param accountName the account of the service to lookup.
* @return the credential.
*/
public String getCredentials(String serviceName, String accountName) {
String credential;
if (Platform.isWindows()) {
try {
WindowsCredentialAccessor winCredAccessor =
new WindowsCredentialAccessor(serviceName, accountName);
credential = winCredAccessor.read();
} catch (RuntimeException e) {
throw logger.logExceptionAsError(new CredentialUnavailableException(
"Failed to read Vs Code credentials from Windows Credential API.", e));
}
} else if (Platform.isMac()) {
try {
KeyChainAccessor keyChainAccessor = new KeyChainAccessor(null,
serviceName, accountName);
byte[] readCreds = keyChainAccessor.read();
credential = new String(readCreds, StandardCharsets.UTF_8);
} catch (RuntimeException e) {
throw logger.logExceptionAsError(new CredentialUnavailableException(
"Failed to read Vs Code credentials from Mac Native Key Chain.", e));
}
} else if (Platform.isLinux()) {
try {
LinuxKeyRingAccessor keyRingAccessor = new LinuxKeyRingAccessor(
"org.freedesktop.Secret.Generic", "service",
serviceName, "account", accountName);
byte[] readCreds = keyRingAccessor.read();
credential = new String(readCreds, StandardCharsets.UTF_8);
} catch (RuntimeException e) {
throw logger.logExceptionAsError(new CredentialUnavailableException(
"Failed to read Vs Code credentials from Linux Key Ring.", e));
}
} else {
throw logger.logExceptionAsError(
new CredentialUnavailableException(PLATFORM_NOT_SUPPORTED_ERROR));
}
if (CoreUtils.isNullOrEmpty(credential) || !isRefreshTokenString(credential)) {
throw logger.logExceptionAsError(
new CredentialUnavailableException("Please authenticate via Azure Tools plugin in VS Code IDE."));
}
return credential;
}
/**
* Get the auth host of the specified {@code azureEnvironment}.
*
* @return the auth host.
*/
public String getAzureAuthHost(String cloud) {
switch (cloud) {
case "Azure":
return KnownAuthorityHosts.AZURE_CLOUD;
case "AzureChina":
return KnownAuthorityHosts.AZURE_CHINA_CLOUD;
case "AzureGermanCloud":
return KnownAuthorityHosts.AZURE_GERMAN_CLOUD;
case "AzureUSGovernment":
return KnownAuthorityHosts.AZURE_US_GOVERNMENT;
default:
return KnownAuthorityHosts.AZURE_CLOUD;
}
}
} | class VisualStudioCacheAccessor {
private static final String PLATFORM_NOT_SUPPORTED_ERROR = "Platform could not be determined for VS Code"
+ " credential authentication.";
private final ClientLogger logger = new ClientLogger(VisualStudioCacheAccessor.class);
private static final Pattern REFRESH_TOKEN_PATTERN = Pattern.compile("^[-_.a-zA-Z0-9]+$");
private JsonNode getUserSettings() {
JsonNode output = null;
String homeDir = System.getProperty("user.home");
String settingsPath = "";
ObjectMapper mapper = new ObjectMapper();
try {
if (Platform.isWindows()) {
settingsPath = Paths.get(System.getenv("APPDATA"), "Code", "User", "settings.json")
.toString();
} else if (Platform.isMac()) {
settingsPath = Paths.get(homeDir, "Library",
"Application Support", "Code", "User", "settings.json").toString();
} else if (Platform.isLinux()) {
settingsPath = Paths.get(homeDir, ".config", "Code", "User", "settings.json")
.toString();
} else {
throw logger.logExceptionAsError(
new CredentialUnavailableException(PLATFORM_NOT_SUPPORTED_ERROR));
}
File settingsFile = new File(settingsPath);
output = mapper.readTree(settingsFile);
} catch (Exception e) {
return null;
}
return output;
}
/**
* Get the user configured settings of Visual Studio code.
*
* @param tenantId the user specified tenant id.
* @return a Map containing Vs Code user settings
*/
public Map<String, String> getUserSettingsDetails(String tenantId) {
JsonNode userSettings = getUserSettings();
Map<String, String> details = new HashMap<>();
String tenant = tenantId;
String cloud = "Azure";
if (userSettings != null && !userSettings.isNull()) {
if (userSettings.has("azure.tenant") && CoreUtils.isNullOrEmpty(tenant)) {
tenant = userSettings.get("azure.tenant").asText();
}
if (userSettings.has("azure.cloud")) {
cloud = userSettings.get("azure.cloud").asText();
}
}
details.put("tenant", tenant);
details.put("cloud", cloud);
return details;
}
/**
* Get the credential for the specified service and account name.
*
* @param serviceName the name of the service to lookup.
* @param accountName the account of the service to lookup.
* @return the credential.
*/
public String getCredentials(String serviceName, String accountName) {
String credential;
if (Platform.isWindows()) {
try {
WindowsCredentialAccessor winCredAccessor =
new WindowsCredentialAccessor(serviceName, accountName);
credential = winCredAccessor.read();
} catch (RuntimeException e) {
throw logger.logExceptionAsError(new CredentialUnavailableException(
"Failed to read Vs Code credentials from Windows Credential API.", e));
}
} else if (Platform.isMac()) {
try {
KeyChainAccessor keyChainAccessor = new KeyChainAccessor(null,
serviceName, accountName);
byte[] readCreds = keyChainAccessor.read();
credential = new String(readCreds, StandardCharsets.UTF_8);
} catch (RuntimeException e) {
throw logger.logExceptionAsError(new CredentialUnavailableException(
"Failed to read Vs Code credentials from Mac Native Key Chain.", e));
}
} else if (Platform.isLinux()) {
try {
LinuxKeyRingAccessor keyRingAccessor = new LinuxKeyRingAccessor(
"org.freedesktop.Secret.Generic", "service",
serviceName, "account", accountName);
byte[] readCreds = keyRingAccessor.read();
credential = new String(readCreds, StandardCharsets.UTF_8);
} catch (RuntimeException e) {
throw logger.logExceptionAsError(new CredentialUnavailableException(
"Failed to read Vs Code credentials from Linux Key Ring.", e));
}
} else {
throw logger.logExceptionAsError(
new CredentialUnavailableException(PLATFORM_NOT_SUPPORTED_ERROR));
}
if (CoreUtils.isNullOrEmpty(credential) || !isRefreshTokenString(credential)) {
throw logger.logExceptionAsError(
new CredentialUnavailableException("Please authenticate via Azure Tools plugin in VS Code IDE."));
}
return credential;
}
/**
* Get the auth host of the specified {@code azureEnvironment}.
*
* @return the auth host.
*/
public String getAzureAuthHost(String cloud) {
switch (cloud) {
case "Azure":
return KnownAuthorityHosts.AZURE_CLOUD;
case "AzureChina":
return KnownAuthorityHosts.AZURE_CHINA_CLOUD;
case "AzureGermanCloud":
return KnownAuthorityHosts.AZURE_GERMAN_CLOUD;
case "AzureUSGovernment":
return KnownAuthorityHosts.AZURE_US_GOVERNMENT;
default:
return KnownAuthorityHosts.AZURE_CLOUD;
}
}
} |
The lock token won't be the zero lock token; it will be null or empty. The zero lock token only existed because a UUID type was used previously. | public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber))
.map(receivedMessage -> {
if (receiveMode == ReceiveMode.PEEK_LOCK && !CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
if (!ZERO_LOCK_TOKEN.equals(UUID.fromString(receivedMessage.getLockToken()))) {
messageLockContainer.addOrUpdate(receivedMessage.getLockToken(), receivedMessage.getLockedUntil());
}
}
return receivedMessage;
});
} | if (receiveMode == ReceiveMode.PEEK_LOCK && !CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) { | public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber))
.map(receivedMessage -> {
if (receiveMode == ReceiveMode.PEEK_LOCK && !CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
receivedMessage.setLockedUntil(messageLockContainer.addOrUpdate(receivedMessage.getLockToken(),
receivedMessage.getLockedUntil()));
}
return receivedMessage;
});
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
private static final UUID ZERO_LOCK_TOKEN = new UUID(0L, 0L);
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiverOptions receiverOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
private final ReceiveAsyncOptions defaultReceiveOptions;
private final Runnable onClientClose;
/**
* Map containing linkNames and their associated consumers. Key: linkName Value: consumer associated with that
* linkName.
*/
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
* Creates a receiver that listens to a Service Bus resource.
*
* @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
* @param entityPath The name of the topic or queue.
* @param entityType The type of the Service Bus resource.
* @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
* @param receiverOptions Options when receiving messages.
* @param connectionProcessor The AMQP connection to the Service Bus resource.
* @param tracerProvider Tracer for telemetry.
* @param messageSerializer Serializes and deserializes Service Bus messages.
* @param messageLockContainer Container for message locks.
* @param onClientClose Operation to run when the client completes.
*/
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiverOptions receiverOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.prefetch = receiverOptions.getPrefetchCount();
this.receiveMode = receiverOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
this.defaultReceiveOptions = new ReceiveAsyncOptions()
.setEnableAutoComplete(true)
.setMaxAutoRenewDuration(connectionProcessor.getRetryOptions().getTryTimeout());
}
/**
* Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Service Bus namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Service Bus resource this client interacts with.
*
* @return The Service Bus resource this client interacts with.
*/
public String getEntityPath() {
return entityPath;
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
* again for processing. Abandoning a message will increase the delivery count on the message.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken) {
return abandon(lockToken, null);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> complete(MessageLockToken lockToken) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken) {
return defer(lockToken, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
*/
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(MessageLockToken lockToken) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
if (Objects.isNull(deadLetterOptions)) {
return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peek() {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(ServiceBusManagementNode::peek);
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.peek(sequenceNumber));
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
* Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity and completes them
* when they are finished processing.
*
* <p>
* By default, each successfully consumed message is {@link
*
* auto-completion feature will {@link
*
* operation timeout} has elapsed.
* </p>
*
* @return A stream of messages from the Service Bus entity.
* @throws AmqpException if {@link AmqpRetryOptions
* downstream consumers are still processing the message.
*/
public Flux<ServiceBusReceivedMessage> receive() {
return receive(defaultReceiveOptions);
}
/**
* Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity with a set of
* options. To disable lock auto-renewal, set {@link ReceiveAsyncOptions
* setMaxAutoRenewDuration} to {@link Duration
*
* @param options Set of options to set when receiving messages.
* @return A stream of messages from the Service Bus entity.
* @throws NullPointerException if {@code options} is null.
* @throws IllegalArgumentException if {@link ReceiveAsyncOptions
* duration} is negative.
*/
public Flux<ServiceBusReceivedMessage> receive(ReceiveAsyncOptions options) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
}
if (Objects.isNull(options)) {
return fluxError(logger, new NullPointerException("'options' cannot be null"));
} else if (options.getMaxAutoRenewDuration() != null && options.getMaxAutoRenewDuration().isNegative()) {
return fluxError(logger, new IllegalArgumentException("'maxAutoRenewDuration' cannot be negative."));
}
if (receiveMode != ReceiveMode.PEEK_LOCK && options.isEnableAutoComplete()) {
return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
"Auto-complete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
}
return Flux.usingWhen(
Mono.fromCallable(() -> getOrCreateConsumer(entityPath, options)),
consumer -> consumer.receive(),
consumer -> {
final String linkName = consumer.getLinkName();
logger.info("{}: Receiving completed. Disposing", linkName);
final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
if (removed == null) {
logger.warning("Could not find consumer to remove for: {}", linkName);
} else {
removed.close();
}
return Mono.empty();
});
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
* Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
* on the entity. When a message is received in {@link ReceiveMode
* server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
* processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
* lock is reset to the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@link MessageLockToken
*/
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(lockToken.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (lockToken.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
UUID lockTokenUuid = null;
try {
lockTokenUuid = UUID.fromString(lockToken.getLockToken());
} catch (IllegalArgumentException ex) {
monoError(logger, ex);
}
UUID finalLockTokenUuid = lockTokenUuid;
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(finalLockTokenUuid))
.map(instant -> {
if (lockToken instanceof ServiceBusReceivedMessage) {
((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
}
return instant;
});
}
/**
* Disposes of the consumer by closing the underlying connection to the service.
*/
@Override
public void close() {
if (isDisposed.getAndSet(true)) {
return;
}
logger.info("Removing receiver links.");
openConsumers.keySet().forEach(key -> {
final ServiceBusAsyncConsumer consumer = openConsumers.get(key);
if (consumer != null) {
consumer.close();
}
});
openConsumers.clear();
onClientClose.run();
}
private Mono<Boolean> isLockTokenValid(String lockToken) {
final Instant lockedUntilUtc = messageLockContainer.getLockTokenExpiration(lockToken);
if (lockedUntilUtc == null) {
logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
return Mono.just(false);
}
final Instant now = Instant.now();
if (lockedUntilUtc.isBefore(now)) {
return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
"Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", lockedUntilUtc, now),
getErrorContext())));
}
return Mono.just(true);
}
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
} else if (Objects.isNull(message)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(message.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (message.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
if (receiveMode != ReceiveMode.PEEK_LOCK) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
} else if (Objects.isNull(message.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (message.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
final String lockToken = message.getLockToken();
final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
entityPath, dispositionStatus, lockToken, instant);
return isLockTokenValid(lockToken).flatMap(isLocked -> {
return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> {
if (isLocked) {
return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
deadLetterErrorDescription, propertiesToModify);
} else {
return Mono.error(new UnsupportedOperationException(
"Cannot complete a message that is not locked. lockToken: " + lockToken));
}
});
}).then(Mono.fromRunnable(() -> {
logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
messageLockContainer.remove(lockToken);
}));
}
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName, ReceiveAsyncOptions options) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
.repeat();
final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, linkName, null);
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor, context));
final boolean isAutoLockRenewal = options.getMaxAutoRenewDuration() != null
&& !options.getMaxAutoRenewDuration().isZero();
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
options.isEnableAutoComplete(), isAutoLockRenewal, options.getMaxAutoRenewDuration(),
connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
private AmqpErrorContext getErrorContext() {
return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Shared default used by deadLetter(lockToken) when the caller supplies no options.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();

// Flipped exactly once in close(); every public operation checks it to reject use-after-close.
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiverOptions receiverOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final int prefetch;
private final ReceiveMode receiveMode;
// Tracks lock tokens and their expirations for messages received in PEEK_LOCK mode.
private final MessageLockContainer messageLockContainer;
// Used by receive() when the caller does not pass options; auto-complete enabled by default.
private final ReceiveAsyncOptions defaultReceiveOptions;
// Invoked from close() so the owning builder/client can release shared resources.
private final Runnable onClientClose;
/**
 * Map containing linkNames and their associated consumers. Key: linkName Value: consumer associated with that
 * linkName.
 */
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiverOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client completes.
 * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code entityPath},
 *     {@code receiverOptions}, {@code connectionProcessor}, {@code tracerProvider}, or
 *     {@code messageSerializer} is null.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    boolean isSessionEnabled, ReceiverOptions receiverOptions,
    ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
    MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveMessageOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    this.prefetch = receiverOptions.getPrefetchCount();
    this.receiveMode = receiverOptions.getReceiveMode();
    this.entityType = entityType;
    this.isSessionEnabled = isSessionEnabled;
    this.messageLockContainer = messageLockContainer;
    this.onClientClose = onClientClose;
    // Defaults for receive(): auto-complete on, lock renewal bounded by the retry try-timeout.
    this.defaultReceiveOptions = new ReceiveAsyncOptions()
        .setEnableAutoComplete(true)
        .setMaxAutoRenewDuration(connectionProcessor.getRetryOptions().getTryTimeout());
}
/**
 * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
 * {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace that the connection is associated with.
 */
public String getFullyQualifiedNamespace() {
    return this.fullyQualifiedNamespace;
}

/**
 * Gets the Service Bus resource this client interacts with.
 *
 * @return The Service Bus resource this client interacts with.
 */
public String getEntityPath() {
    return this.entityPath;
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in receive-and-delete mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    return abandon(lockToken, null);
}

/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
 * This will make the message available again for processing. Abandoning a message will increase the delivery count
 * on the message.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in receive-and-delete mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}

/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in receive-and-delete mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
    return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}

/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
 * subqueue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in receive-and-delete mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    return defer(lockToken, null);
}

/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
 * move message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in receive-and-delete mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}

/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue, using default dead-letter
 * options.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in receive-and-delete mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}

/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in receive-and-delete mode.
 * @throws IllegalArgumentException if the lock token is empty.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (Objects.isNull(deadLetterOptions)) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    // SUSPENDED is the AMQP disposition corresponding to dead-lettering.
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
        deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify());
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call to
 * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
 * message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @throws IllegalStateException if the receiver has been closed.
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }
    // Peek goes through the management node rather than the receive link.
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(ServiceBusManagementNode::peek);
}

/**
 * Starting from the given sequence number, reads next the active message without changing the state of the receiver
 * or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @throws IllegalStateException if the receiver has been closed.
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.peek(sequenceNumber));
}

/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @throws IllegalStateException if the receiver has been closed.
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages));
}

/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @throws IllegalStateException if the receiver has been closed.
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity and completes them
 * when they are finished processing.
 *
 * <p>
 * Uses {@code defaultReceiveOptions}: auto-complete is enabled and lock auto-renewal is bounded by the
 * connection's retry try-timeout (see the constructor).
 * </p>
 *
 * @return A stream of messages from the Service Bus entity.
 */
public Flux<ServiceBusReceivedMessage> receive() {
    return receive(defaultReceiveOptions);
}

/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity with a set of
 * options. To disable lock auto-renewal, set the max auto-renew duration to zero.
 *
 * @param options Set of options to set when receiving messages.
 * @return A stream of messages from the Service Bus entity.
 * @throws NullPointerException if {@code options} is null.
 * @throws IllegalArgumentException if the max auto-renew duration is negative.
 * @throws UnsupportedOperationException if auto-complete is requested on a RECEIVE_AND_DELETE receiver.
 */
public Flux<ServiceBusReceivedMessage> receive(ReceiveAsyncOptions options) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
    }
    if (Objects.isNull(options)) {
        return fluxError(logger, new NullPointerException("'options' cannot be null"));
    } else if (options.getMaxAutoRenewDuration() != null && options.getMaxAutoRenewDuration().isNegative()) {
        return fluxError(logger, new IllegalArgumentException("'maxAutoRenewDuration' cannot be negative."));
    }
    // Auto-complete requires a lock to settle, which only PEEK_LOCK provides.
    if (receiveMode != ReceiveMode.PEEK_LOCK && options.isEnableAutoComplete()) {
        return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
            "Auto-complete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
    }
    // usingWhen ties the consumer's lifetime to the subscription: created lazily on subscribe,
    // removed from the cache and closed when the stream terminates or is cancelled.
    return Flux.usingWhen(
        Mono.fromCallable(() -> getOrCreateConsumer(entityPath, options)),
        consumer -> consumer.receive(),
        consumer -> {
            final String linkName = consumer.getLinkName();
            logger.info("{}: Receiving completed. Disposing", linkName);
            final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
            if (removed == null) {
                logger.warning("Could not find consumer to remove for: {}", linkName);
            } else {
                removed.close();
            }
            return Mono.empty();
        });
}
// NOTE(review): the Javadoc block below documents a single-message receiveDeferredMessage(sequenceNumber)
// method, but no such method follows it here -- it appears to be a dangling comment left after the method
// was removed or moved; confirm and delete or restore the method.
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
 * sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 *
 * @return A deferred message with the matching {@code sequenceNumber}.
 */
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using sequence number.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 * @throws IllegalStateException if the receiver has been closed.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
    }
    // Deferred messages are fetched through the management node, not the receive link.
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
 * on the entity. When a message is received in PEEK_LOCK mode, the message is locked on the
 * server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
 * processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
 * lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} or its lock token string is null.
 * @throws IllegalArgumentException if the lock token is empty or is not a valid UUID.
 * @throws IllegalStateException if the receiver has been closed.
 */
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(lockToken.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (lockToken.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    final UUID lockTokenUuid;
    try {
        lockTokenUuid = UUID.fromString(lockToken.getLockToken());
    } catch (IllegalArgumentException ex) {
        // BUG FIX: the original created the error Mono but discarded it, so execution continued
        // and the service call was made with a null UUID. Return the error to the caller instead.
        return monoError(logger, ex);
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(serviceBusManagementNode ->
            serviceBusManagementNode.renewMessageLock(lockTokenUuid))
        .map(instant -> {
            // Keep the received message's locked-until timestamp in sync with the renewal.
            if (lockToken instanceof ServiceBusReceivedMessage) {
                ((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
            }
            return instant;
        });
}
/**
 * Disposes of the consumer by closing the underlying connection to the service. Idempotent:
 * subsequent calls return immediately.
 */
@Override
public void close() {
    if (isDisposed.getAndSet(true)) {
        return;
    }

    logger.info("Removing receiver links.");
    for (ServiceBusAsyncConsumer consumer : openConsumers.values()) {
        if (consumer != null) {
            consumer.close();
        }
    }
    openConsumers.clear();

    onClientClose.run();
}
/**
 * Checks whether {@code lockToken} is owned by this receiver and not yet expired.
 * Emits {@code false} when the token is unknown, {@code true} when it is still locked,
 * and errors with an {@link AmqpException} when the lock has already expired.
 */
private Mono<Boolean> isLockTokenValid(String lockToken) {
    final Instant expiration = messageLockContainer.getLockTokenExpiration(lockToken);
    if (expiration == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }

    final Instant now = Instant.now();
    if (!expiration.isBefore(now)) {
        return Mono.just(true);
    }

    final String errorMessage = String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", expiration, now);
    return Mono.error(logger.logExceptionAsError(new AmqpException(false, errorMessage, getErrorContext())));
}
/**
 * Updates the settlement state (disposition) of the message identified by {@code message}'s lock token through
 * the management node. On success, the lock token is removed from the local lock container.
 *
 * @param message Holder of the lock token identifying the message to settle.
 * @param dispositionStatus Target disposition (COMPLETED, ABANDONED, DEFERRED, SUSPENDED).
 * @param deadLetterReason Reason when dead-lettering; otherwise null.
 * @param deadLetterErrorDescription Description when dead-lettering; otherwise null.
 * @param propertiesToModify Message properties to modify; may be null.
 * @return A {@link Mono} that completes when the service acknowledges the disposition.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {

    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }

    // Settlement requires a lock, which only PEEK_LOCK mode provides.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }
    // (Removed a duplicated null/empty lock-token validation here; identical checks already
    // ran above, so the second copy was unreachable dead code.)

    final String lockToken = message.getLockToken();
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);

    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        messageLockContainer.remove(lockToken);
    }));
}
/**
 * Returns the consumer cached for {@code linkName}, creating and caching a new one if none exists.
 * The receive link Flux is {@code repeat()}ed so a replacement link is requested whenever the
 * previous one completes (e.g. the service closes it).
 */
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName, ReceiveAsyncOptions options) {
    return openConsumers.computeIfAbsent(linkName, name -> {
        logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
        final Flux<AmqpReceiveLink> receiveLink =
            connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
                receiveMode, isSessionEnabled, null, entityType))
                .doOnNext(next -> {
                    final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
                        + " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
                    logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
                        entityType);
                })
                .repeat(); // re-establish the AMQP link after the previous one completes
        final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, linkName, null);
        final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
        final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
            new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor, context));
        // Auto lock renewal is enabled only when a non-null, non-zero max renew duration was supplied.
        final boolean isAutoLockRenewal = options.getMaxAutoRenewDuration() != null
            && !options.getMaxAutoRenewDuration().isZero();
        return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
            options.isEnableAutoComplete(), isAutoLockRenewal, options.getMaxAutoRenewDuration(),
            connectionProcessor.getRetryOptions(), messageLockContainer,
            this::complete, this::abandon, this::renewMessageLock);
    });
}
/** Builds the AMQP error context (namespace + entity path) used when reporting failures. */
private AmqpErrorContext getErrorContext() {
    final String namespace = getFullyQualifiedNamespace();
    final String path = getEntityPath();
    return new SessionErrorContext(namespace, path);
}
} |
You need to format this document. Ctrl+Alt+L. The spacing is off. | void receiveBySequenceNumberAndAbandon() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
final Duration timeout = Duration.ofSeconds(2);
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive().next())
.block(TIMEOUT);
Assertions.assertNotNull(receivedMessage);
receiverManualComplete.defer(receivedMessage).block(TIMEOUT);
final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
.receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
Assertions.assertNotNull(receivedDeferredMessage);
Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
receiverManualComplete.abandon(receivedDeferredMessage).block(TIMEOUT);
StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
.expectNextCount(0)
.thenCancel()
.verify(timeout);
} | final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete | void receiveBySequenceNumberAndAbandon() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
final Duration timeout = Duration.ofSeconds(2);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive(options).next())
.block(TIMEOUT);
Assertions.assertNotNull(receivedMessage);
receiverManualComplete.defer(receivedMessage).block(TIMEOUT);
final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
.receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
Assertions.assertNotNull(receivedDeferredMessage);
Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
receiverManualComplete.abandon(receivedDeferredMessage).block(TIMEOUT);
StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
.expectNextCount(0)
.thenCancel()
.verify(timeout);
} | class ServiceBusReceiverAsyncClientIntegrationTest extends IntegrationTestBase {
private static final String CONTENTS = "Test-contents";

private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClientIntegrationTest.class);

// One sender plus three receivers differing in settlement behavior; all target the same queue.
// NOTE(review): receiver and receiverManualComplete are configured identically here; the
// "manual complete" behavior presumably comes from per-test ReceiveAsyncOptions -- confirm.
private ServiceBusReceiverAsyncClient receiver;
private ServiceBusReceiverAsyncClient receiverManualComplete;
private ServiceBusReceiverAsyncClient receiveDeleteModeReceiver;
private ServiceBusSenderAsyncClient sender;

ServiceBusReceiverAsyncClientIntegrationTest() {
    super(new ClientLogger(ServiceBusReceiverAsyncClientIntegrationTest.class));
}

// Creates the sender and receiver clients against the integration-test queue before each test.
@Override
protected void beforeTest() {
    final String queueName = getQueueName();
    Assertions.assertNotNull(queueName, "'queueName' cannot be null.");

    sender = createBuilder().sender().queueName(queueName).buildAsyncClient();
    receiver = createBuilder()
        .receiver()
        .queueName(queueName)
        .buildAsyncClient();
    receiverManualComplete = createBuilder()
        .receiver()
        .queueName(queueName)
        .buildAsyncClient();
    receiveDeleteModeReceiver = createBuilder()
        .receiver()
        .queueName(queueName)
        .receiveMode(ReceiveMode.RECEIVE_AND_DELETE)
        .buildAsyncClient();
}

// Closes every client created in beforeTest().
@Override
protected void afterTest() {
    dispose(receiver, receiverManualComplete, receiveDeleteModeReceiver, sender);
}
/**
 * Verifies that we can send and receive two messages.
 */
@Disabled("Problem when receiving two messages. Link is closed prematurely.")
@Test
void receiveTwoMessagesAutoComplete() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    // NOTE(review): despite the test name, auto-complete is disabled here -- confirm intent.
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);

    StepVerifier.create(sender.send(message).then(sender.send(message))
        .thenMany(receiver.receive(options)))
        .assertNext(receivedMessage ->
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .assertNext(receivedMessage ->
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .thenCancel()
        .verify();
}

/**
 * Verifies that we can send and receive a message.
 */
@Test
void receiveMessageAutoComplete() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    // NOTE(review): despite the test name, auto-complete is disabled here -- confirm intent.
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);

    StepVerifier.create(sender.send(message).thenMany(receiver.receive(options)))
        .assertNext(receivedMessage ->
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
        .thenCancel()
        .verify();
}

/**
 * Verifies that we can send and peek a message.
 */
@Test
void peekMessage() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);

    StepVerifier.create(sender.send(message).then(receiver.peek()))
        .assertNext(receivedMessage -> {
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
        })
        .verifyComplete();
}

/**
 * Verifies that we can schedule and receive a message.
 */
@Test
void sendScheduledMessageAndReceive() {
    final String messageId = UUID.randomUUID().toString();
    final String contents = "Some-contents";
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(contents, messageId, 0);
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(2);

    // Delay the subscription so the scheduled enqueue time has passed before receiving.
    sender.scheduleMessage(message, scheduledEnqueueTime)
        .delaySubscription(Duration.ofSeconds(3))
        .block();

    StepVerifier.create(receiver.receive().take(1))
        .assertNext(receivedMessage -> {
            Assertions.assertArrayEquals(contents.getBytes(), receivedMessage.getBody());
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
        })
        .verifyComplete();
}
/**
 * Verifies that we can schedule and receive multiple messages.
 */
@Test
void sendMultipleScheduledMessageAndReceive() {
    final String messageId1 = UUID.randomUUID().toString();
    final String messageId2 = UUID.randomUUID().toString();
    String contents = "Some-contents";
    final ServiceBusMessage message1 = TestUtils.getServiceBusMessage(contents, messageId1, 0);
    final ServiceBusMessage message2 = TestUtils.getServiceBusMessage(contents, messageId2, 0);
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(1);
    final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);

    sender.scheduleMessage(message1, scheduledEnqueueTime)
        .block(TIMEOUT);
    sender.scheduleMessage(message2, scheduledEnqueueTime)
        .block(TIMEOUT);

    // Effectively-final copy so the value can be captured by the lambdas below.
    String finalContents = contents;
    StepVerifier.create(receiveDeleteModeReceiver.receive(options).take(2))
        .assertNext(receivedMessage -> {
            Assertions.assertArrayEquals(finalContents.getBytes(), receivedMessage.getBody());
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
        })
        .assertNext(receivedMessage -> {
            Assertions.assertArrayEquals(finalContents.getBytes(), receivedMessage.getBody());
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
        })
        .verifyComplete();
}

/**
 * Verifies that we can cancel a scheduled message.
 */
@Test
void cancelScheduledMessage() {
    final String messageId = UUID.randomUUID().toString();
    final String contents = "Some-contents";
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(contents, messageId, 0);
    // Schedule far enough out (10s) that the cancellation below lands before enqueue.
    final Instant scheduledEnqueueTime = Instant.now().plusSeconds(10);
    final Duration delayDuration = Duration.ofSeconds(3);

    final Long sequenceNumber = sender.scheduleMessage(message, scheduledEnqueueTime).block();
    logger.verbose("Scheduled the message, sequence number {}.", sequenceNumber);

    Assertions.assertNotNull(sequenceNumber);

    Mono.delay(delayDuration)
        .then(sender.cancelScheduledMessage(sequenceNumber))
        .block();
    logger.verbose("Cancelled the scheduled message, sequence number {}.", sequenceNumber);

    // The cancelled message must never arrive.
    StepVerifier.create(receiver.receive().take(1))
        .expectNoEvent(Duration.ofSeconds(5))
        .verifyComplete();
}
/**
 * Verifies that we can send and peek a message starting from a sequence number.
 */
@Test
void peekFromSequenceNumberMessage() {
    final long fromSequenceNumber = 1;
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);

    StepVerifier.create(sender.send(message).then(receiver.peekAt(fromSequenceNumber)))
        .assertNext(receivedMessage -> {
            Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
        })
        .verifyComplete();
}

/**
 * Verifies that we can send and peek a batch of messages.
 */
@Test
void peekBatchMessages() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    int maxMessages = 2;

    StepVerifier.create(Mono.when(sender.send(message), sender.send(message))
        .thenMany(receiver.peekBatch(maxMessages)))
        .expectNextCount(maxMessages)
        .verifyComplete();
}

/**
 * Verifies that we can send and peek a batch of messages starting from a sequence number.
 */
@Test
void peekBatchMessagesFromSequence() {
    final String messageId = UUID.randomUUID().toString();
    final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
    int maxMessages = 2;
    int fromSequenceNumber = 1;

    StepVerifier.create(Mono.when(sender.send(message), sender.send(message))
        .thenMany(receiver.peekBatchAt(maxMessages, fromSequenceNumber)))
        .expectNextCount(maxMessages)
        .verifyComplete();
}
    /**
     * Verifies that we can deadletter a message.
     */
    @Test
    void deadLetterMessage() {
        final String messageId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
        // Auto-complete must be off so the received message stays locked and can be dead-lettered.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        final ServiceBusReceivedMessage receivedMessage = sender.send(message)
            .then(receiver.receive(options).next())
            .block(Duration.ofSeconds(30));
        Assertions.assertNotNull(receivedMessage);
        // Dead-lettering the locked message should complete without error.
        StepVerifier.create(receiver.deadLetter(receivedMessage))
            .verifyComplete();
    }
    /**
     * Verifies that we can renew message lock.
     */
    @Test
    void receiveAndRenewLock() {
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, "id-1", 0);
        final AtomicReference<ServiceBusReceivedMessage> receivedMessage = new AtomicReference<>();
        final AtomicReference<Instant> initialLock = new AtomicReference<>();
        // Auto-renewal is disabled, so the only renewal comes from the explicit renewMessageLock call below.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions()
            .setEnableAutoComplete(false)
            .setMaxAutoRenewDuration(null);
        sender.send(message).block(TIMEOUT);
        ServiceBusReceivedMessage m = receiver.receive(options).next().block(TIMEOUT);
        Assertions.assertNotNull(m);
        Assertions.assertNotNull(m.getLockedUntil());
        // Capture the message and its initial lock expiry for comparison after the renewal.
        receivedMessage.set(m);
        initialLock.set(m.getLockedUntil());
        // Wait 10s before renewing so the renewed expiry is observably later than the initial one.
        StepVerifier.create(Mono.delay(Duration.ofSeconds(10))
            .then(Mono.defer(() -> receiver.renewMessageLock(receivedMessage.get()))))
            .assertNext(lockedUntil -> {
                Assertions.assertTrue(lockedUntil.isAfter(initialLock.get()),
                    String.format("Updated lock is not after the initial Lock. updated: [%s]. initial:[%s]",
                        lockedUntil, initialLock.get()));
                // The renewal should also be reflected on the message object itself.
                Assertions.assertEquals(receivedMessage.get().getLockedUntil(), lockedUntil);
            })
            .verifyComplete();
    }
/**
* Verifies that the lock can be automatically renewed.
*/
@Test
void autoRenewLockOnReceiveMessage() {
final String messageId = UUID.randomUUID().toString();
final ServiceBusMessage message = getServiceBusMessage(CONTENTS, messageId, 0);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions()
.setEnableAutoComplete(true)
.setMaxAutoRenewDuration(Duration.ofSeconds(2));
sender.send(message).block(TIMEOUT);
final ServiceBusReceiverAsyncClient receiver = new ServiceBusClientBuilder()
.connectionString(getConnectionString())
.receiver()
.receiveMode(ReceiveMode.PEEK_LOCK)
.queueName(getQueueName())
.buildAsyncClient();
try {
StepVerifier.create(receiver.receive(options))
.assertNext(received -> {
Assertions.assertNotNull(received.getLockedUntil());
Assertions.assertNotNull(received.getLockToken());
logger.info("{}: lockId[{}]. lockedUntil[{}]",
received.getSequenceNumber(), received.getLockToken(), received.getLockedUntil());
final Instant initial = received.getLockedUntil();
Instant latest = Instant.MIN;
for (int i = 0; i < 3; i++) {
try {
TimeUnit.SECONDS.sleep(15);
} catch (InterruptedException error) {
logger.error("Error occurred while sleeping: " + error);
}
Assertions.assertNotNull(received.getLockedUntil());
latest = received.getLockedUntil();
}
Assertions.assertTrue(initial.isBefore(latest),
String.format("Latest should be after initial. initial: %s. latest: %s", initial, latest));
logger.info("Completing message.");
receiver.complete(received).block(Duration.ofSeconds(15));
})
.thenCancel()
.verify();
} finally {
receiver.close();
}
}
    @Test
    void receiveAndDeleteWithBinaryData() {
        final String messageTrackingId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
        final Duration timeout = Duration.ofSeconds(2);
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        // The RECEIVE_AND_DELETE receiver should deliver the message exactly once: assert the payload,
        // then expect no further emissions within the timeout window.
        StepVerifier.create(sender.send(message).thenMany(receiveDeleteModeReceiver.receive(options)))
            .assertNext(receivedMessage ->
                Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
            .expectNoEvent(timeout)
            .thenCancel()
            .verify();
    }
@Test
void receiveAndAbandon() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive().next())
.block(TIMEOUT);
Assertions.assertNotNull(receivedMessage);
StepVerifier.create(receiverManualComplete.abandon(receivedMessage))
.verifyComplete();
}
    @Test
    void receiveBySequenceNumberAndDeadletter() {
        final String messageTrackingId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
        final Duration timeout = Duration.ofSeconds(2);
        // Auto-complete must be off so the message can be deferred instead of settled.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        final ServiceBusReceivedMessage receivedMessage = sender.send(message)
            .then(receiverManualComplete.receive(options).next())
            .block(TIMEOUT);
        Assertions.assertNotNull(receivedMessage);
        // Defer, then fetch the deferred message back by its sequence number.
        receiverManualComplete.defer(receivedMessage).block(TIMEOUT);
        final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
            .receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
        Assertions.assertNotNull(receivedDeferredMessage);
        Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
        // Dead-letter the deferred message; afterwards it should no longer be retrievable by sequence number.
        receiverManualComplete.deadLetter(receivedDeferredMessage).block(TIMEOUT);
        StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
            .expectNextCount(0)
            .thenCancel()
            .verify(timeout);
    }
@Test
@Test
void sendReceiveMessageWithVariousPropertyTypes() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage messageToSend = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
final Duration timeout = Duration.ofSeconds(2);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
Map<String, Object> sentProperties = messageToSend.getProperties();
sentProperties.put("NullProperty", null);
sentProperties.put("BooleanProperty", true);
sentProperties.put("ByteProperty", (byte) 1);
sentProperties.put("ShortProperty", (short) 2);
sentProperties.put("IntProperty", 3);
sentProperties.put("LongProperty", 4L);
sentProperties.put("FloatProperty", 5.5f);
sentProperties.put("DoubleProperty", 6.6f);
sentProperties.put("CharProperty", 'z');
sentProperties.put("UUIDProperty", UUID.randomUUID());
sentProperties.put("StringProperty", "string");
sender.send(messageToSend).block(TIMEOUT);
StepVerifier.create(receiveDeleteModeReceiver.receive(options))
.assertNext(receivedMessage -> {
Map<String, Object> receivedProperties = receivedMessage.getProperties();
for (Map.Entry<String, Object> sentEntry : sentProperties.entrySet()) {
if (sentEntry.getValue() != null && sentEntry.getValue().getClass().isArray()) {
Assertions.assertArrayEquals((Object[]) sentEntry.getValue(), (Object[]) receivedProperties.get(sentEntry.getKey()));
} else {
Assertions.assertEquals(sentEntry.getValue(), receivedProperties.get(sentEntry.getKey()));
}
}
})
.expectNoEvent(timeout)
.thenCancel()
.verify();
}
@Test
void receiveBySequenceNumberAndComplete() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
final Duration timeout = Duration.ofSeconds(2);
final ServiceBusReceivedMessage receivedMessage = sender.send(message)
.then(receiverManualComplete.receive().next())
.block(TIMEOUT);
Assertions.assertNotNull(receivedMessage);
receiverManualComplete.defer(receivedMessage).block(Duration.ofSeconds(30));
final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
.receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
Assertions.assertNotNull(receivedDeferredMessage);
Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
receiverManualComplete.complete(receivedDeferredMessage).block(TIMEOUT);
StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
.expectNextCount(0)
.thenCancel()
.verify(timeout);
}
} | class ServiceBusReceiverAsyncClientIntegrationTest extends IntegrationTestBase {
    // Payload used by every test message in this class.
    private static final String CONTENTS = "Test-contents";
    private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClientIntegrationTest.class);
    // Clients are (re)created in beforeTest() and disposed in afterTest().
    private ServiceBusReceiverAsyncClient receiver;
    private ServiceBusReceiverAsyncClient receiverManualComplete;
    private ServiceBusReceiverAsyncClient receiveDeleteModeReceiver;
    private ServiceBusSenderAsyncClient sender;

    ServiceBusReceiverAsyncClientIntegrationTest() {
        super(new ClientLogger(ServiceBusReceiverAsyncClientIntegrationTest.class));
    }
    @Override
    protected void beforeTest() {
        final String queueName = getQueueName();
        Assertions.assertNotNull(queueName, "'queueName' cannot be null.");

        // One sender and three receivers against the same queue: default PEEK_LOCK receiver,
        // a second PEEK_LOCK receiver used for manual settlement, and a RECEIVE_AND_DELETE receiver.
        sender = createBuilder().sender().queueName(queueName).buildAsyncClient();
        receiver = createBuilder()
            .receiver()
            .queueName(queueName)
            .buildAsyncClient();
        receiverManualComplete = createBuilder()
            .receiver()
            .queueName(queueName)
            .buildAsyncClient();
        receiveDeleteModeReceiver = createBuilder()
            .receiver()
            .queueName(queueName)
            .receiveMode(ReceiveMode.RECEIVE_AND_DELETE)
            .buildAsyncClient();
    }
    @Override
    protected void afterTest() {
        // Dispose every client created in beforeTest().
        dispose(receiver, receiverManualComplete, receiveDeleteModeReceiver, sender);
    }
    /**
     * Verifies that we can send and receive two messages.
     */
    @Disabled("Problem when receiving two messages. Link is closed prematurely.")
    @Test
    void receiveTwoMessagesAutoComplete() {
        final String messageId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
        // NOTE(review): the method name says "AutoComplete" but auto-complete is disabled here —
        // confirm which behavior this test is meant to cover.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        StepVerifier.create(sender.send(message).then(sender.send(message))
            .thenMany(receiver.receive(options)))
            .assertNext(receivedMessage ->
                Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
            .assertNext(receivedMessage ->
                Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
            .thenCancel()
            .verify();
    }
    /**
     * Verifies that we can send and receive a message.
     */
    @Test
    void receiveMessageAutoComplete() {
        final String messageId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
        // NOTE(review): as above, auto-complete is disabled despite the method name — verify intent.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        StepVerifier.create(sender.send(message).thenMany(receiver.receive(options)))
            .assertNext(receivedMessage ->
                Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
            .thenCancel()
            .verify();
    }
/**
* Verifies that we can send and peek a message.
*/
@Test
void peekMessage() {
final String messageId = UUID.randomUUID().toString();
final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
StepVerifier.create(sender.send(message).then(receiver.peek()))
.assertNext(receivedMessage -> {
Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
})
.verifyComplete();
}
    /**
     * Verifies that we can schedule and receive a message.
     */
    @Test
    void sendScheduledMessageAndReceive() {
        final String messageId = UUID.randomUUID().toString();
        final String contents = "Some-contents";
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(contents, messageId, 0);
        final Instant scheduledEnqueueTime = Instant.now().plusSeconds(2);
        // Delay the subscription past the scheduled enqueue time so the message is available on receive.
        sender.scheduleMessage(message, scheduledEnqueueTime)
            .delaySubscription(Duration.ofSeconds(3))
            .block();
        StepVerifier.create(receiver.receive().take(1))
            .assertNext(receivedMessage -> {
                Assertions.assertArrayEquals(contents.getBytes(), receivedMessage.getBody());
                Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
            })
            .verifyComplete();
    }
/**
* Verifies that we can schedule and receive multiple messages.
*/
@Test
void sendMultipleScheduledMessageAndReceive() {
final String messageId1 = UUID.randomUUID().toString();
final String messageId2 = UUID.randomUUID().toString();
String contents = "Some-contents";
final ServiceBusMessage message1 = TestUtils.getServiceBusMessage(contents, messageId1, 0);
final ServiceBusMessage message2 = TestUtils.getServiceBusMessage(contents, messageId2, 0);
final Instant scheduledEnqueueTime = Instant.now().plusSeconds(1);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
sender.scheduleMessage(message1, scheduledEnqueueTime)
.block(TIMEOUT);
sender.scheduleMessage(message2, scheduledEnqueueTime)
.block(TIMEOUT);
String finalContents = contents;
StepVerifier.create(receiveDeleteModeReceiver.receive(options).take(2))
.assertNext(receivedMessage -> {
Assertions.assertArrayEquals(finalContents.getBytes(), receivedMessage.getBody());
Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
})
.assertNext(receivedMessage -> {
Assertions.assertArrayEquals(finalContents.getBytes(), receivedMessage.getBody());
Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
})
.verifyComplete();
}
    /**
     * Verifies that we can cancel a scheduled message.
     */
    @Test
    void cancelScheduledMessage() {
        final String messageId = UUID.randomUUID().toString();
        final String contents = "Some-contents";
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(contents, messageId, 0);
        // Schedule 10s out, cancel after 3s — i.e. well before the message would be enqueued.
        final Instant scheduledEnqueueTime = Instant.now().plusSeconds(10);
        final Duration delayDuration = Duration.ofSeconds(3);
        final Long sequenceNumber = sender.scheduleMessage(message, scheduledEnqueueTime).block();
        logger.verbose("Scheduled the message, sequence number {}.", sequenceNumber);
        Assertions.assertNotNull(sequenceNumber);
        Mono.delay(delayDuration)
            .then(sender.cancelScheduledMessage(sequenceNumber))
            .block();
        logger.verbose("Cancelled the scheduled message, sequence number {}.", sequenceNumber);
        // The cancelled message must never be delivered.
        StepVerifier.create(receiver.receive().take(1))
            .expectNoEvent(Duration.ofSeconds(5))
            .verifyComplete();
    }
    /**
     * Verifies that we can send and peek a message.
     */
    @Test
    void peekFromSequenceNumberMessage() {
        final long fromSequenceNumber = 1;
        final String messageId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
        // Peek starting at the given sequence number; the tracking id proves it is our message.
        StepVerifier.create(sender.send(message).then(receiver.peekAt(fromSequenceNumber)))
            .assertNext(receivedMessage -> {
                Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID));
            })
            .verifyComplete();
    }
    /**
     * Verifies that we can send and peek a batch of messages.
     */
    @Test
    void peekBatchMessages() {
        final String messageId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
        int maxMessages = 2;
        // Send the same payload twice, then peek both copies back.
        StepVerifier.create(Mono.when(sender.send(message), sender.send(message))
            .thenMany(receiver.peekBatch(maxMessages)))
            .expectNextCount(maxMessages)
            .verifyComplete();
    }
    /**
     * Verifies that we can send and peek a batch of messages.
     */
    @Test
    void peekBatchMessagesFromSequence() {
        final String messageId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
        int maxMessages = 2;
        int fromSequenceNumber = 1;
        // As above, but peeking starts from an explicit sequence number.
        StepVerifier.create(Mono.when(sender.send(message), sender.send(message))
            .thenMany(receiver.peekBatchAt(maxMessages, fromSequenceNumber)))
            .expectNextCount(maxMessages)
            .verifyComplete();
    }
    /**
     * Verifies that we can deadletter a message.
     */
    @Test
    void deadLetterMessage() {
        final String messageId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageId, 0);
        // Auto-complete must be off so the message stays locked and can be dead-lettered.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        final ServiceBusReceivedMessage receivedMessage = sender.send(message)
            .then(receiver.receive(options).next())
            .block(Duration.ofSeconds(30));
        Assertions.assertNotNull(receivedMessage);
        StepVerifier.create(receiver.deadLetter(receivedMessage))
            .verifyComplete();
    }
    /**
     * Verifies that we can renew message lock.
     */
    @Test
    void receiveAndRenewLock() {
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, "id-1", 0);
        final AtomicReference<ServiceBusReceivedMessage> receivedMessage = new AtomicReference<>();
        final AtomicReference<Instant> initialLock = new AtomicReference<>();
        // Auto-renewal disabled: the only renewal comes from the explicit renewMessageLock call below.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions()
            .setEnableAutoComplete(false)
            .setMaxAutoRenewDuration(null);
        sender.send(message).block(TIMEOUT);
        ServiceBusReceivedMessage m = receiver.receive(options).next().block(TIMEOUT);
        Assertions.assertNotNull(m);
        Assertions.assertNotNull(m.getLockedUntil());
        receivedMessage.set(m);
        initialLock.set(m.getLockedUntil());
        // Wait 10s before renewing so the renewed expiry is observably later than the initial one.
        StepVerifier.create(Mono.delay(Duration.ofSeconds(10))
            .then(Mono.defer(() -> receiver.renewMessageLock(receivedMessage.get()))))
            .assertNext(lockedUntil -> {
                Assertions.assertTrue(lockedUntil.isAfter(initialLock.get()),
                    String.format("Updated lock is not after the initial Lock. updated: [%s]. initial:[%s]",
                        lockedUntil, initialLock.get()));
                Assertions.assertEquals(receivedMessage.get().getLockedUntil(), lockedUntil);
            })
            .verifyComplete();
    }
/**
* Verifies that the lock can be automatically renewed.
*/
@Test
void autoRenewLockOnReceiveMessage() {
final String messageId = UUID.randomUUID().toString();
final ServiceBusMessage message = getServiceBusMessage(CONTENTS, messageId, 0);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions()
.setEnableAutoComplete(true)
.setMaxAutoRenewDuration(Duration.ofSeconds(2));
sender.send(message).block(TIMEOUT);
final ServiceBusReceiverAsyncClient receiver = new ServiceBusClientBuilder()
.connectionString(getConnectionString())
.receiver()
.receiveMode(ReceiveMode.PEEK_LOCK)
.queueName(getQueueName())
.buildAsyncClient();
try {
StepVerifier.create(receiver.receive(options))
.assertNext(received -> {
Assertions.assertNotNull(received.getLockedUntil());
Assertions.assertNotNull(received.getLockToken());
logger.info("{}: lockId[{}]. lockedUntil[{}]",
received.getSequenceNumber(), received.getLockToken(), received.getLockedUntil());
final Instant initial = received.getLockedUntil();
Instant latest = Instant.MIN;
for (int i = 0; i < 3; i++) {
try {
TimeUnit.SECONDS.sleep(15);
} catch (InterruptedException error) {
logger.error("Error occurred while sleeping: " + error);
}
Assertions.assertNotNull(received.getLockedUntil());
latest = received.getLockedUntil();
}
Assertions.assertTrue(initial.isBefore(latest),
String.format("Latest should be after initial. initial: %s. latest: %s", initial, latest));
logger.info("Completing message.");
receiver.complete(received).block(Duration.ofSeconds(15));
})
.thenCancel()
.verify();
} finally {
receiver.close();
}
}
    @Test
    void receiveAndDeleteWithBinaryData() {
        final String messageTrackingId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
        final Duration timeout = Duration.ofSeconds(2);
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        // RECEIVE_AND_DELETE should deliver exactly once: assert the payload, then expect silence.
        StepVerifier.create(sender.send(message).thenMany(receiveDeleteModeReceiver.receive(options)))
            .assertNext(receivedMessage ->
                Assertions.assertTrue(receivedMessage.getProperties().containsKey(MESSAGE_TRACKING_ID)))
            .expectNoEvent(timeout)
            .thenCancel()
            .verify();
    }
    @Test
    void receiveAndAbandon() {
        final String messageTrackingId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
        final ServiceBusReceivedMessage receivedMessage = sender.send(message)
            .then(receiverManualComplete.receive().next())
            .block(TIMEOUT);
        Assertions.assertNotNull(receivedMessage);
        // Abandoning the locked message completes without error.
        StepVerifier.create(receiverManualComplete.abandon(receivedMessage))
            .verifyComplete();
    }
    @Test
    void receiveBySequenceNumberAndDeadletter() {
        final String messageTrackingId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
        final Duration timeout = Duration.ofSeconds(2);
        // Auto-complete must be off so the message can be deferred instead of settled.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        final ServiceBusReceivedMessage receivedMessage = sender.send(message)
            .then(receiverManualComplete.receive(options).next())
            .block(TIMEOUT);
        Assertions.assertNotNull(receivedMessage);
        receiverManualComplete.defer(receivedMessage).block(TIMEOUT);
        final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
            .receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
        Assertions.assertNotNull(receivedDeferredMessage);
        Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
        // After dead-lettering, the deferred message must no longer be retrievable by sequence number.
        receiverManualComplete.deadLetter(receivedDeferredMessage).block(TIMEOUT);
        StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
            .expectNextCount(0)
            .thenCancel()
            .verify(timeout);
    }
@Test
@Test
void sendReceiveMessageWithVariousPropertyTypes() {
final String messageTrackingId = UUID.randomUUID().toString();
final ServiceBusMessage messageToSend = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
final Duration timeout = Duration.ofSeconds(2);
final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
Map<String, Object> sentProperties = messageToSend.getProperties();
sentProperties.put("NullProperty", null);
sentProperties.put("BooleanProperty", true);
sentProperties.put("ByteProperty", (byte) 1);
sentProperties.put("ShortProperty", (short) 2);
sentProperties.put("IntProperty", 3);
sentProperties.put("LongProperty", 4L);
sentProperties.put("FloatProperty", 5.5f);
sentProperties.put("DoubleProperty", 6.6f);
sentProperties.put("CharProperty", 'z');
sentProperties.put("UUIDProperty", UUID.randomUUID());
sentProperties.put("StringProperty", "string");
sender.send(messageToSend).block(TIMEOUT);
StepVerifier.create(receiveDeleteModeReceiver.receive(options))
.assertNext(receivedMessage -> {
Map<String, Object> receivedProperties = receivedMessage.getProperties();
for (Map.Entry<String, Object> sentEntry : sentProperties.entrySet()) {
if (sentEntry.getValue() != null && sentEntry.getValue().getClass().isArray()) {
Assertions.assertArrayEquals((Object[]) sentEntry.getValue(), (Object[]) receivedProperties.get(sentEntry.getKey()));
} else {
Assertions.assertEquals(sentEntry.getValue(), receivedProperties.get(sentEntry.getKey()));
}
}
})
.expectNoEvent(timeout)
.thenCancel()
.verify();
}
    @Test
    void receiveBySequenceNumberAndComplete() {
        final String messageTrackingId = UUID.randomUUID().toString();
        final ServiceBusMessage message = TestUtils.getServiceBusMessage(CONTENTS, messageTrackingId, 0);
        final Duration timeout = Duration.ofSeconds(2);
        // Auto-complete is disabled so the message can be deferred before any settlement happens.
        final ReceiveAsyncOptions options = new ReceiveAsyncOptions().setEnableAutoComplete(false);
        final ServiceBusReceivedMessage receivedMessage = sender.send(message)
            .then(receiverManualComplete.receive(options).next())
            .block(TIMEOUT);
        Assertions.assertNotNull(receivedMessage);
        receiverManualComplete.defer(receivedMessage).block(Duration.ofSeconds(30));
        final ServiceBusReceivedMessage receivedDeferredMessage = receiverManualComplete
            .receiveDeferredMessage(receivedMessage.getSequenceNumber()).block(TIMEOUT);
        Assertions.assertNotNull(receivedDeferredMessage);
        Assertions.assertEquals(receivedMessage.getSequenceNumber(), receivedDeferredMessage.getSequenceNumber());
        // Completing the deferred message makes it unretrievable by sequence number.
        receiverManualComplete.complete(receivedDeferredMessage).block(TIMEOUT);
        StepVerifier.create(receiverManualComplete.receiveDeferredMessage(receivedMessage.getSequenceNumber()))
            .expectNextCount(0)
            .thenCancel()
            .verify(timeout);
    }
} |
So you don't need to do the conversion to uuid from string | public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber))
.map(receivedMessage -> {
if (receiveMode == ReceiveMode.PEEK_LOCK && !CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
if (!ZERO_LOCK_TOKEN.equals(UUID.fromString(receivedMessage.getLockToken()))) {
messageLockContainer.addOrUpdate(receivedMessage.getLockToken(), receivedMessage.getLockedUntil());
}
}
return receivedMessage;
});
} | if (receiveMode == ReceiveMode.PEEK_LOCK && !CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) { | public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessage(receiveMode, sequenceNumber))
.map(receivedMessage -> {
if (receiveMode == ReceiveMode.PEEK_LOCK && !CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
receivedMessage.setLockedUntil(messageLockContainer.addOrUpdate(receivedMessage.getLockToken(),
receivedMessage.getLockedUntil()));
}
return receivedMessage;
});
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
    // Options used when deadLetter(lockToken) is called without explicit options.
    private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
    // All-zero lock token; a token with this value indicates there is no real lock.
    private static final UUID ZERO_LOCK_TOKEN = new UUID(0L, 0L);
    // Guards against double-dispose of this client.
    private final AtomicBoolean isDisposed = new AtomicBoolean();
    private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
    private final String fullyQualifiedNamespace;
    private final String entityPath;
    private final MessagingEntityType entityType;
    private final boolean isSessionEnabled;
    private final ReceiverOptions receiverOptions;
    private final ServiceBusConnectionProcessor connectionProcessor;
    private final TracerProvider tracerProvider;
    private final MessageSerializer messageSerializer;
    private final int prefetch;
    private final ReceiveMode receiveMode;
    private final MessageLockContainer messageLockContainer;
    private final ReceiveAsyncOptions defaultReceiveOptions;
    private final Runnable onClientClose;
    /**
     * Map containing linkNames and their associated consumers. Key: linkName Value: consumer associated with that
     * linkName.
     */
    private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
* Creates a receiver that listens to a Service Bus resource.
*
* @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
* @param entityPath The name of the topic or queue.
* @param entityType The type of the Service Bus resource.
* @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
* @param receiverOptions Options when receiving messages.
* @param connectionProcessor The AMQP connection to the Service Bus resource.
* @param tracerProvider Tracer for telemetry.
* @param messageSerializer Serializes and deserializes Service Bus messages.
* @param messageLockContainer Container for message locks.
* @param onClientClose Operation to run when the client completes.
*/
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
boolean isSessionEnabled, ReceiverOptions receiverOptions,
ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveMessageOptions' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.prefetch = receiverOptions.getPrefetchCount();
this.receiveMode = receiverOptions.getReceiveMode();
this.entityType = entityType;
this.isSessionEnabled = isSessionEnabled;
this.messageLockContainer = messageLockContainer;
this.onClientClose = onClientClose;
this.defaultReceiveOptions = new ReceiveAsyncOptions()
.setEnableAutoComplete(true)
.setMaxAutoRenewDuration(connectionProcessor.getRetryOptions().getTryTimeout());
}
    /**
     * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
     * {@code {yournamespace}.servicebus.windows.net}.
     *
     * @return The fully qualified Service Bus namespace that the connection is associated with.
     */
    public String getFullyQualifiedNamespace() {
        return fullyQualifiedNamespace;
    }

    /**
     * Gets the Service Bus resource this client interacts with (the queue or topic name).
     *
     * @return The Service Bus resource this client interacts with.
     */
    public String getEntityPath() {
        return entityPath;
    }
    /**
     * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
     * again for processing. Abandoning a message will increase the delivery count on the message.
     *
     * @param lockToken Lock token of the message.
     *
     * @return A {@link Mono} that completes when the Service Bus operation finishes.
     * @throws NullPointerException if {@code lockToken} is null.
     * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
     * mode.
     * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
     */
    public Mono<Void> abandon(MessageLockToken lockToken) {
        return abandon(lockToken, null);
    }
    /**
     * Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
     * This will make the message available again for processing. Abandoning a message will increase the delivery count
     * on the message.
     *
     * @param lockToken Lock token of the message.
     * @param propertiesToModify Properties to modify on the message.
     *
     * @return A {@link Mono} that completes when the Service Bus operation finishes.
     * @throws NullPointerException if {@code lockToken} is null.
     * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
     * mode.
     * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
     */
    public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
        return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
    }
    /**
     * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
     * service.
     *
     * @param lockToken Lock token of the message.
     *
     * @return A {@link Mono} that completes when the Service Bus operation finishes.
     * @throws NullPointerException if {@code lockToken} is null.
     * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
     * mode.
     * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
     */
    public Mono<Void> complete(MessageLockToken lockToken) {
        return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
    }
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
 * subqueue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    // Convenience overload: defer without modifying message properties.
    return defer(lockToken, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
 * move message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    // Delegates to the shared disposition path; all validation happens in updateDisposition.
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/service-bus-dead-letter-queues">Dead-letter
 * queues</a>
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    // Uses a shared, unmodified default options instance; it carries no reason/description/properties.
    return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (deadLetterOptions == null) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    final String reason = deadLetterOptions.getDeadLetterReason();
    final String description = deadLetterOptions.getDeadLetterErrorDescription();
    final Map<String, Object> properties = deadLetterOptions.getPropertiesToModify();
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, reason, description, properties);
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call to
 * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
 * message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMap(node -> node.peek());
}
/**
 * Starting from the given sequence number, reads next the active message without changing the state of the receiver
 * or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
    }
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMap(mgmt -> mgmt.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }
    // FIX: the Javadoc promises IllegalArgumentException for non-positive counts, but it was never
    // enforced; reject it here instead of forwarding a nonsensical request to the service.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }
    // FIX: enforce the documented IllegalArgumentException for non-positive counts (mirrors peekBatch).
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity and completes them
 * when they are finished processing.
 *
 * <p>
 * By default, each successfully consumed message is {@link #complete(MessageLockToken) completed}. If an error
 * occurs while processing, the auto-completion feature will {@link #abandon(MessageLockToken) abandon} the message
 * once the {@link AmqpRetryOptions#getTryTimeout() operation timeout} has elapsed.
 * </p>
 *
 * @return A stream of messages from the Service Bus entity.
 * @throws AmqpException if the {@link AmqpRetryOptions#getTryTimeout() operation timeout} has elapsed and
 *     downstream consumers are still processing the message.
 */
public Flux<ServiceBusReceivedMessage> receive() {
    // defaultReceiveOptions: auto-complete enabled, auto-renew bounded by the retry try-timeout.
    return receive(defaultReceiveOptions);
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity with a set of
 * options. To disable lock auto-renewal, set {@link ReceiveAsyncOptions#setMaxAutoRenewDuration(Duration)
 * setMaxAutoRenewDuration} to {@link Duration#ZERO}.
 *
 * @param options Set of options to set when receiving messages.
 * @return A stream of messages from the Service Bus entity.
 * @throws NullPointerException if {@code options} is null.
 * @throws IllegalArgumentException if {@link ReceiveAsyncOptions#getMaxAutoRenewDuration() max auto-renew
 *     duration} is negative.
 */
public Flux<ServiceBusReceivedMessage> receive(ReceiveAsyncOptions options) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
    }
    if (Objects.isNull(options)) {
        return fluxError(logger, new NullPointerException("'options' cannot be null"));
    } else if (options.getMaxAutoRenewDuration() != null && options.getMaxAutoRenewDuration().isNegative()) {
        return fluxError(logger, new IllegalArgumentException("'maxAutoRenewDuration' cannot be negative."));
    }
    // Auto-complete requires a lock to settle against, which only PEEK_LOCK provides.
    if (receiveMode != ReceiveMode.PEEK_LOCK && options.isEnableAutoComplete()) {
        return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
            "Auto-complete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
    }
    // usingWhen ties the consumer's lifetime to the subscription: the consumer is created (or reused)
    // on subscribe and removed from the open-consumer map and closed when the stream terminates.
    return Flux.usingWhen(
        Mono.fromCallable(() -> getOrCreateConsumer(entityPath, options)),
        consumer -> consumer.receive(),
        consumer -> {
            final String linkName = consumer.getLinkName();
            logger.info("{}: Receiving completed. Disposing", linkName);
            final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
            if (removed == null) {
                // Another path (e.g. close()) already removed it; nothing left to dispose.
                logger.warning("Could not find consumer to remove for: {}", linkName);
            } else {
                removed.close();
            }
            return Mono.empty();
        });
}
// NOTE(review): a Javadoc block for a single-message receiveDeferredMessage(long sequenceNumber) overload was
// present here, but no such method exists in this chunk — confirm whether the overload was removed intentionally.
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using sequence number.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
 * on the entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message is locked on the
 * server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
 * processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
 * lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is empty or not a valid UUID.
 */
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(lockToken.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (lockToken.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }
    final UUID lockTokenUuid;
    try {
        lockTokenUuid = UUID.fromString(lockToken.getLockToken());
    } catch (IllegalArgumentException ex) {
        // BUG FIX: previously `monoError(logger, ex)` was called but its result discarded, so execution
        // fell through with a null UUID. Surface the malformed-token error to the subscriber instead.
        return monoError(logger, ex);
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(serviceBusManagementNode ->
            serviceBusManagementNode.renewMessageLock(lockTokenUuid))
        .map(instant -> {
            // When the token is the full received message, propagate the new expiration onto it.
            if (lockToken instanceof ServiceBusReceivedMessage) {
                ((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
            }
            return instant;
        });
}
/**
 * Disposes of the consumer by closing the underlying connection to the service.
 */
@Override
public void close() {
    // First caller wins; subsequent calls are no-ops.
    if (isDisposed.getAndSet(true)) {
        return;
    }
    logger.info("Removing receiver links.");
    for (ServiceBusAsyncConsumer consumer : openConsumers.values()) {
        if (consumer != null) {
            consumer.close();
        }
    }
    openConsumers.clear();
    onClientClose.run();
}
/**
 * Checks whether this receiver owns an unexpired lock for {@code lockToken}.
 * Emits {@code false} for unknown tokens, {@code true} for valid ones, and errors for expired locks.
 */
private Mono<Boolean> isLockTokenValid(String lockToken) {
    // The expiration lookup doubles as an ownership check: tokens this receiver never locked have no entry.
    final Instant expiration = messageLockContainer.getLockTokenExpiration(lockToken);
    if (expiration == null) {
        logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
        return Mono.just(false);
    }
    final Instant now = Instant.now();
    if (!expiration.isBefore(now)) {
        return Mono.just(true);
    }
    return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
        "Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", expiration, now),
        getErrorContext())));
}
/**
 * Shared settlement path for complete/abandon/defer/dead-letter: validates the lock token, verifies the
 * receiver mode, then asks the management node to update the message's disposition.
 *
 * @param message Lock-token holder for the message being settled.
 * @param dispositionStatus Target disposition.
 * @param deadLetterReason Optional dead-letter reason (SUSPENDED only).
 * @param deadLetterErrorDescription Optional dead-letter description (SUSPENDED only).
 * @param propertiesToModify Optional message properties to modify.
 * @return A {@link Mono} that completes when the service acknowledges the disposition change.
 */
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(message)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(message.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (message.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }
    // FIX: removed a second, unreachable copy of the null/empty lock-token checks that previously
    // followed this mode check — the guards above already returned in those cases.
    if (receiveMode != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }
    final String lockToken = message.getLockToken();
    final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
    logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
        entityPath, dispositionStatus, lockToken, instant);
    return isLockTokenValid(lockToken).flatMap(isLocked -> {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(node -> {
                if (isLocked) {
                    return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
                        deadLetterErrorDescription, propertiesToModify);
                } else {
                    // Token exists but is not owned/locked by this receiver.
                    return Mono.error(new UnsupportedOperationException(
                        "Cannot complete a message that is not locked. lockToken: " + lockToken));
                }
            });
    }).then(Mono.fromRunnable(() -> {
        logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
        // The lock is consumed by a successful settlement; drop it from the container.
        messageLockContainer.remove(lockToken);
    }));
}
/**
 * Returns the consumer registered under {@code linkName}, creating and caching one if absent.
 * Creation wires a repeating receive-link publisher into a {@link ServiceBusReceiveLinkProcessor} so the
 * consumer transparently recreates its AMQP link after disconnects.
 */
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName, ReceiveAsyncOptions options) {
    return openConsumers.computeIfAbsent(linkName, name -> {
        logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
        // .repeat() re-subscribes after the link completes, yielding a fresh link each time.
        final Flux<AmqpReceiveLink> receiveLink =
            connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
                receiveMode, isSessionEnabled, null, entityType))
                .doOnNext(next -> {
                    final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
                        + " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
                    logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
                        entityType);
                })
                .repeat();
        final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, linkName, null);
        final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
        final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
            new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor, context));
        // Auto lock renewal is on when a max duration is set and non-zero.
        final boolean isAutoLockRenewal = options.getMaxAutoRenewDuration() != null
            && !options.getMaxAutoRenewDuration().isZero();
        return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
            options.isEnableAutoComplete(), isAutoLockRenewal, options.getMaxAutoRenewDuration(),
            connectionProcessor.getRetryOptions(), messageLockContainer,
            this::complete, this::abandon, this::renewMessageLock);
    });
}
/** Builds the AMQP error context (namespace + entity path) attached to errors raised by this receiver. */
private AmqpErrorContext getErrorContext() {
    return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
}
class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Shared immutable default for the single-argument deadLetter overload.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
// Set once by close(); guards every public operation against use-after-close.
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final boolean isSessionEnabled;
private final ReceiverOptions receiverOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
// Derived from receiverOptions in the constructor for convenient access.
private final int prefetch;
private final ReceiveMode receiveMode;
private final MessageLockContainer messageLockContainer;
// Used by receive(): auto-complete on, auto-renew bounded by the retry try-timeout.
private final ReceiveAsyncOptions defaultReceiveOptions;
// Invoked once from close() so the owning builder/client can release shared resources.
private final Runnable onClientClose;
/**
 * Map containing linkNames and their associated consumers. Key: linkName Value: consumer associated with that
 * linkName.
 */
private final ConcurrentHashMap<String, ServiceBusAsyncConsumer> openConsumers = new ConcurrentHashMap<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param isSessionEnabled {@code true} if sessions are enabled; {@code false} otherwise.
 * @param receiverOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param messageLockContainer Container for message locks.
 * @param onClientClose Operation to run when the client completes.
 * @throws NullPointerException if any reference parameter is null.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    boolean isSessionEnabled, ReceiverOptions receiverOptions,
    ServiceBusConnectionProcessor connectionProcessor, TracerProvider tracerProvider,
    MessageSerializer messageSerializer, MessageLockContainer messageLockContainer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    // FIX: the message previously named a non-existent 'receiveMessageOptions' parameter.
    this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiverOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    // Consistency: validate the remaining reference parameters up front like the ones above,
    // instead of failing with an opaque NPE on first use.
    this.messageLockContainer = Objects.requireNonNull(messageLockContainer,
        "'messageLockContainer' cannot be null.");
    this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null.");
    this.prefetch = receiverOptions.getPrefetchCount();
    this.receiveMode = receiverOptions.getReceiveMode();
    this.entityType = entityType;
    this.isSessionEnabled = isSessionEnabled;
    this.defaultReceiveOptions = new ReceiveAsyncOptions()
        .setEnableAutoComplete(true)
        .setMaxAutoRenewDuration(connectionProcessor.getRetryOptions().getTryTimeout());
}
/**
 * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to
 * {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace that the connection is associated with.
 */
public String getFullyQualifiedNamespace() {
    return fullyQualifiedNamespace;
}
/**
 * Gets the Service Bus resource this client interacts with.
 *
 * @return The Service Bus resource (queue or topic path) this client interacts with.
 */
public String getEntityPath() {
    return entityPath;
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> abandon(MessageLockToken lockToken) {
    // Convenience overload: abandon without modifying message properties.
    return abandon(lockToken, null);
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
 * This will make the message available again for processing. Abandoning a message will increase the delivery count
 * on the message.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> abandon(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    // Delegates to the shared disposition path; all validation happens in updateDisposition.
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> complete(MessageLockToken lockToken) {
    // Delegates to the shared disposition path; all validation happens in updateDisposition.
    return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
 * subqueue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken) {
    // Convenience overload: defer without modifying message properties.
    return defer(lockToken, null);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
 * move message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(MessageLockToken lockToken, Map<String, Object> propertiesToModify) {
    // Delegates to the shared disposition path; all validation happens in updateDisposition.
    return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null, propertiesToModify);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/service-bus-dead-letter-queues">Dead-letter
 * queues</a>
 */
public Mono<Void> deadLetter(MessageLockToken lockToken) {
    // Uses a shared, unmodified default options instance; it carries no reason/description/properties.
    return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS);
}
/**
 * Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
 * description, and/or modified properties.
 *
 * @param lockToken Lock token of the message.
 * @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} returns an empty value.
 */
public Mono<Void> deadLetter(MessageLockToken lockToken, DeadLetterOptions deadLetterOptions) {
    if (deadLetterOptions == null) {
        return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
    }
    final String reason = deadLetterOptions.getDeadLetterReason();
    final String description = deadLetterOptions.getDeadLetterErrorDescription();
    final Map<String, Object> properties = deadLetterOptions.getPropertiesToModify();
    return updateDisposition(lockToken, DispositionStatus.SUSPENDED, reason, description, properties);
}
/**
 * Reads the next active message without changing the state of the receiver or the message source. The first call to
 * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
 * message in the entity.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peek() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
    }
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMap(node -> node.peek());
}
/**
 * Starting from the given sequence number, reads next the active message without changing the state of the receiver
 * or the message source.
 *
 * @param sequenceNumber The sequence number from where to read the message.
 *
 * @return A peeked {@link ServiceBusReceivedMessage}.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
    }
    final Mono<ServiceBusManagementNode> managementNode = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType));
    return managementNode.flatMap(mgmt -> mgmt.peek(sequenceNumber));
}
/**
 * Reads the next batch of active messages without changing the state of the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
    }
    // FIX: the Javadoc promises IllegalArgumentException for non-positive counts, but it was never
    // enforced; reject it here instead of forwarding a nonsensical request to the service.
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages));
}
/**
 * Starting from the given sequence number, reads the next batch of active messages without changing the state of
 * the receiver or the message source.
 *
 * @param maxMessages The number of messages.
 * @param sequenceNumber The sequence number from where to start reading messages.
 *
 * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
 * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-browsing">Message browsing</a>
 */
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
    }
    // FIX: enforce the documented IllegalArgumentException for non-positive counts (mirrors peekBatch).
    if (maxMessages <= 0) {
        return fluxError(logger, new IllegalArgumentException("'maxMessages' must be a positive integer."));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.peekBatch(maxMessages, sequenceNumber));
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity and completes them
 * when they are finished processing.
 *
 * <p>
 * By default, each successfully consumed message is {@link #complete(MessageLockToken) completed}. If an error
 * occurs while processing, the auto-completion feature will {@link #abandon(MessageLockToken) abandon} the message
 * once the {@link AmqpRetryOptions#getTryTimeout() operation timeout} has elapsed.
 * </p>
 *
 * @return A stream of messages from the Service Bus entity.
 * @throws AmqpException if the {@link AmqpRetryOptions#getTryTimeout() operation timeout} has elapsed and
 *     downstream consumers are still processing the message.
 */
public Flux<ServiceBusReceivedMessage> receive() {
    // defaultReceiveOptions: auto-complete enabled, auto-renew bounded by the retry try-timeout.
    return receive(defaultReceiveOptions);
}
/**
 * Receives a stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity with a set of
 * options. To disable lock auto-renewal, set {@link ReceiveAsyncOptions#setMaxAutoRenewDuration(Duration)
 * setMaxAutoRenewDuration} to {@link Duration#ZERO}.
 *
 * @param options Set of options to set when receiving messages.
 * @return A stream of messages from the Service Bus entity.
 * @throws NullPointerException if {@code options} is null.
 * @throws IllegalArgumentException if {@link ReceiveAsyncOptions#getMaxAutoRenewDuration() max auto-renew
 *     duration} is negative.
 */
public Flux<ServiceBusReceivedMessage> receive(ReceiveAsyncOptions options) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receive")));
    }
    if (Objects.isNull(options)) {
        return fluxError(logger, new NullPointerException("'options' cannot be null"));
    } else if (options.getMaxAutoRenewDuration() != null && options.getMaxAutoRenewDuration().isNegative()) {
        return fluxError(logger, new IllegalArgumentException("'maxAutoRenewDuration' cannot be negative."));
    }
    // Auto-complete requires a lock to settle against, which only PEEK_LOCK provides.
    if (receiveMode != ReceiveMode.PEEK_LOCK && options.isEnableAutoComplete()) {
        return Flux.error(logger.logExceptionAsError(new UnsupportedOperationException(
            "Auto-complete is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.")));
    }
    // usingWhen ties the consumer's lifetime to the subscription: the consumer is created (or reused)
    // on subscribe and removed from the open-consumer map and closed when the stream terminates.
    return Flux.usingWhen(
        Mono.fromCallable(() -> getOrCreateConsumer(entityPath, options)),
        consumer -> consumer.receive(),
        consumer -> {
            final String linkName = consumer.getLinkName();
            logger.info("{}: Receiving completed. Disposing", linkName);
            final ServiceBusAsyncConsumer removed = openConsumers.remove(linkName);
            if (removed == null) {
                // Another path (e.g. close()) already removed it; nothing left to dispose.
                logger.warning("Could not find consumer to remove for: {}", linkName);
            } else {
                removed.close();
            }
            return Mono.empty();
        });
}
// NOTE(review): a Javadoc block for a single-message receiveDeferredMessage(long sequenceNumber) overload was
// present here, but no such method exists in this chunk — confirm whether the overload was removed intentionally.
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using sequence number.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(long... sequenceNumbers) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.receiveDeferredMessageBatch(receiveMode, sequenceNumbers));
}
/**
 * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
 * on the entity. When a message is received in {@link ReceiveMode#PEEK_LOCK} mode, the message is locked on the
 * server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
 * processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
 * lock is reset to the entity's LockDuration value.
 *
 * @param lockToken Lock token of the message to renew.
 *
 * @return The new expiration time for the message.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode.
 * @throws IllegalArgumentException if {@link MessageLockToken#getLockToken()} is empty or not a valid UUID.
 */
public Mono<Instant> renewMessageLock(MessageLockToken lockToken) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
    } else if (Objects.isNull(lockToken.getLockToken())) {
        return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
    } else if (lockToken.getLockToken().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
    }
    final UUID lockTokenUuid;
    try {
        lockTokenUuid = UUID.fromString(lockToken.getLockToken());
    } catch (IllegalArgumentException ex) {
        // BUG FIX: previously `monoError(logger, ex)` was called but its result discarded, so execution
        // fell through with a null UUID. Surface the malformed-token error to the subscriber instead.
        return monoError(logger, ex);
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(serviceBusManagementNode ->
            serviceBusManagementNode.renewMessageLock(lockTokenUuid))
        .map(instant -> {
            // When the token is the full received message, propagate the new expiration onto it.
            if (lockToken instanceof ServiceBusReceivedMessage) {
                ((ServiceBusReceivedMessage) lockToken).setLockedUntil(instant);
            }
            return instant;
        });
}
/**
* Disposes of the consumer by closing the underlying connection to the service.
*/
@Override
public void close() {
if (isDisposed.getAndSet(true)) {
return;
}
logger.info("Removing receiver links.");
openConsumers.keySet().forEach(key -> {
final ServiceBusAsyncConsumer consumer = openConsumers.get(key);
if (consumer != null) {
consumer.close();
}
});
openConsumers.clear();
onClientClose.run();
}
private Mono<Boolean> isLockTokenValid(String lockToken) {
final Instant lockedUntilUtc = messageLockContainer.getLockTokenExpiration(lockToken);
if (lockedUntilUtc == null) {
logger.warning("lockToken[{}] is not owned by this receiver.", lockToken);
return Mono.just(false);
}
final Instant now = Instant.now();
if (lockedUntilUtc.isBefore(now)) {
return Mono.error(logger.logExceptionAsError(new AmqpException(false, String.format(
"Lock already expired for the lock token. Expiration: '%s'. Now: '%s'", lockedUntilUtc, now),
getErrorContext())));
}
return Mono.just(true);
}
private Mono<Void> updateDisposition(MessageLockToken message, DispositionStatus dispositionStatus,
String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
} else if (Objects.isNull(message)) {
return monoError(logger, new NullPointerException("'receivedMessage' cannot be null."));
} else if (Objects.isNull(message.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (message.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
if (receiveMode != ReceiveMode.PEEK_LOCK) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
} else if (Objects.isNull(message.getLockToken())) {
return monoError(logger, new NullPointerException("'receivedMessage.lockToken' cannot be null."));
} else if (message.getLockToken().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'message.lockToken' cannot be empty."));
}
final String lockToken = message.getLockToken();
final Instant instant = messageLockContainer.getLockTokenExpiration(lockToken);
logger.info("{}: Update started. Disposition: {}. Lock: {}. Expiration: {}",
entityPath, dispositionStatus, lockToken, instant);
return isLockTokenValid(lockToken).flatMap(isLocked -> {
return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> {
if (isLocked) {
return node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
deadLetterErrorDescription, propertiesToModify);
} else {
return Mono.error(new UnsupportedOperationException(
"Cannot complete a message that is not locked. lockToken: " + lockToken));
}
});
}).then(Mono.fromRunnable(() -> {
logger.info("{}: Update completed. Disposition: {}. Lock: {}.", entityPath, dispositionStatus, lockToken);
messageLockContainer.remove(lockToken);
}));
}
private ServiceBusAsyncConsumer getOrCreateConsumer(String linkName, ReceiveAsyncOptions options) {
return openConsumers.computeIfAbsent(linkName, name -> {
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
final Flux<AmqpReceiveLink> receiveLink =
connectionProcessor.flatMap(connection -> connection.createReceiveLink(linkName, entityPath,
receiveMode, isSessionEnabled, null, entityType))
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiveMode, isSessionEnabled, "N/A",
entityType);
})
.repeat();
final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, linkName, null);
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(prefetch, retryPolicy, connectionProcessor, context));
final boolean isAutoLockRenewal = options.getMaxAutoRenewDuration() != null
&& !options.getMaxAutoRenewDuration().isZero();
return new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer,
options.isEnableAutoComplete(), isAutoLockRenewal, options.getMaxAutoRenewDuration(),
connectionProcessor.getRetryOptions(), messageLockContainer,
this::complete, this::abandon, this::renewMessageLock);
});
}
private AmqpErrorContext getErrorContext() {
return new SessionErrorContext(getFullyQualifiedNamespace(), getEntityPath());
}
} |
Does setting the default timeout change the timeout for all tests running outside this class too? | static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
} | StepVerifier.setDefaultTimeout(Duration.ofSeconds(30)); | static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
} | class ActiveClientTokenManagerTest {
private static final String AUDIENCE = "an-audience-test";
private static final String SCOPES = "scopes-test";
private static final Duration DEFAULT_DURATION = Duration.ofSeconds(20);
@Mock
private ClaimsBasedSecurityNode cbsNode;
@BeforeAll
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@BeforeEach
void setup() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
void teardown() {
Mockito.framework().clearInlineMocks();
cbsNode = null;
}
/**
* Verify that we can get successes and errors from CBS node.
*/
@Test
void getAuthorizationResults() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenCancel()
.verify();
}
/**
* Verify that we can get successes and errors from CBS node. This un-retriable error will not allow it to be
* rescheduled.
*/
@Test
void getAuthorizationResultsSuccessFailure() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final IllegalArgumentException error = new IllegalArgumentException("Some error");
final Duration expiryDuration = Duration.ofSeconds(20);
final AtomicInteger invocations = new AtomicInteger();
when(cbsNode.authorize(any(), any())).thenAnswer(invocationOnMock -> {
if (invocations.incrementAndGet() < 3) {
return Mono.just(OffsetDateTime.now(ZoneOffset.UTC).plusSeconds(20));
} else {
return Mono.error(error);
}
});
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(expiryDuration)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(expiryDuration)
.expectError(IllegalArgumentException.class)
.verifyThenAssertThat()
.hasNotDroppedElements()
.hasNotDroppedElements()
.hasNotDroppedErrors();
}
/**
* Verify that we cannot authorize with CBS node when it has already been disposed of.
*/
@Test
void cannotAuthorizeDisposedInstance() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION));
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
tokenManager.authorize().then(Mono.fromRunnable(tokenManager::close)).block();
StepVerifier.create(tokenManager.authorize())
.expectError(AzureException.class)
.verify();
}
/**
* Verify that the ActiveClientTokenManager reschedules the authorization task.
*/
@SuppressWarnings("unchecked")
@Test
void getAuthorizationResultsRetriableError() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final AmqpException error = new AmqpException(true, AmqpErrorCondition.ARGUMENT_ERROR,
"Retryable argument error", new AmqpErrorContext("Test-context-namespace"));
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION), Mono.error(error),
getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.thenAwait(DEFAULT_DURATION)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenCancel()
.verify();
}
/**
* Verify that the ActiveClientTokenManager does not get more authorization tasks.
*/
@SuppressWarnings("unchecked")
@Test
void getAuthorizationResultsNonRetriableError() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final AmqpException error = new AmqpException(false, AmqpErrorCondition.TIMEOUT_ERROR, "Test CBS node error.",
new AmqpErrorContext("Test-context-namespace"));
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION), Mono.error(error),
getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.expectErrorSatisfies(throwable -> {
Assertions.assertTrue(throwable instanceof AmqpException);
Assertions.assertFalse(((AmqpException)throwable).isTransient());
})
.verify();
}
private Mono<OffsetDateTime> getNextExpiration(Duration duration) {
return Mono.fromCallable(() -> OffsetDateTime.now(ZoneOffset.UTC).plus(duration));
}
} | class ActiveClientTokenManagerTest {
private static final String AUDIENCE = "an-audience-test";
private static final String SCOPES = "scopes-test";
private static final Duration DEFAULT_DURATION = Duration.ofSeconds(20);
@Mock
private ClaimsBasedSecurityNode cbsNode;
@BeforeAll
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@BeforeEach
void setup() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
void teardown() {
Mockito.framework().clearInlineMocks();
cbsNode = null;
}
/**
* Verify that we can get successes and errors from CBS node.
*/
@Test
void getAuthorizationResults() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenCancel()
.verify();
}
/**
* Verify that we can get successes and errors from CBS node. This un-retriable error will not allow it to be
* rescheduled.
*/
@Test
void getAuthorizationResultsSuccessFailure() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final IllegalArgumentException error = new IllegalArgumentException("Some error");
final Duration expiryDuration = Duration.ofSeconds(20);
final AtomicInteger invocations = new AtomicInteger();
when(cbsNode.authorize(any(), any())).thenAnswer(invocationOnMock -> {
if (invocations.incrementAndGet() < 3) {
return Mono.just(OffsetDateTime.now(ZoneOffset.UTC).plusSeconds(20));
} else {
return Mono.error(error);
}
});
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(expiryDuration)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(expiryDuration)
.expectError(IllegalArgumentException.class)
.verifyThenAssertThat()
.hasNotDroppedElements()
.hasNotDroppedElements()
.hasNotDroppedErrors();
}
/**
* Verify that we cannot authorize with CBS node when it has already been disposed of.
*/
@Test
void cannotAuthorizeDisposedInstance() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION));
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
tokenManager.authorize().then(Mono.fromRunnable(tokenManager::close)).block();
StepVerifier.create(tokenManager.authorize())
.expectError(AzureException.class)
.verify();
}
/**
* Verify that the ActiveClientTokenManager reschedules the authorization task.
*/
@SuppressWarnings("unchecked")
@Test
void getAuthorizationResultsRetriableError() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final AmqpException error = new AmqpException(true, AmqpErrorCondition.ARGUMENT_ERROR,
"Retryable argument error", new AmqpErrorContext("Test-context-namespace"));
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION), Mono.error(error),
getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.thenAwait(DEFAULT_DURATION)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenCancel()
.verify();
}
/**
* Verify that the ActiveClientTokenManager does not get more authorization tasks.
*/
@SuppressWarnings("unchecked")
@Test
void getAuthorizationResultsNonRetriableError() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final AmqpException error = new AmqpException(false, AmqpErrorCondition.TIMEOUT_ERROR, "Test CBS node error.",
new AmqpErrorContext("Test-context-namespace"));
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION), Mono.error(error),
getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.expectErrorSatisfies(throwable -> {
Assertions.assertTrue(throwable instanceof AmqpException);
Assertions.assertFalse(((AmqpException) throwable).isTransient());
})
.verify();
}
private Mono<OffsetDateTime> getNextExpiration(Duration duration) {
return Mono.fromCallable(() -> OffsetDateTime.now(ZoneOffset.UTC).plus(duration));
}
} |
No. I have an `AfterAll` notation to reset the timeout. | static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
} | StepVerifier.setDefaultTimeout(Duration.ofSeconds(30)); | static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
} | class ActiveClientTokenManagerTest {
private static final String AUDIENCE = "an-audience-test";
private static final String SCOPES = "scopes-test";
private static final Duration DEFAULT_DURATION = Duration.ofSeconds(20);
@Mock
private ClaimsBasedSecurityNode cbsNode;
@BeforeAll
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@BeforeEach
void setup() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
void teardown() {
Mockito.framework().clearInlineMocks();
cbsNode = null;
}
/**
* Verify that we can get successes and errors from CBS node.
*/
@Test
void getAuthorizationResults() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenCancel()
.verify();
}
/**
* Verify that we can get successes and errors from CBS node. This un-retriable error will not allow it to be
* rescheduled.
*/
@Test
void getAuthorizationResultsSuccessFailure() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final IllegalArgumentException error = new IllegalArgumentException("Some error");
final Duration expiryDuration = Duration.ofSeconds(20);
final AtomicInteger invocations = new AtomicInteger();
when(cbsNode.authorize(any(), any())).thenAnswer(invocationOnMock -> {
if (invocations.incrementAndGet() < 3) {
return Mono.just(OffsetDateTime.now(ZoneOffset.UTC).plusSeconds(20));
} else {
return Mono.error(error);
}
});
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(expiryDuration)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(expiryDuration)
.expectError(IllegalArgumentException.class)
.verifyThenAssertThat()
.hasNotDroppedElements()
.hasNotDroppedElements()
.hasNotDroppedErrors();
}
/**
* Verify that we cannot authorize with CBS node when it has already been disposed of.
*/
@Test
void cannotAuthorizeDisposedInstance() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION));
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
tokenManager.authorize().then(Mono.fromRunnable(tokenManager::close)).block();
StepVerifier.create(tokenManager.authorize())
.expectError(AzureException.class)
.verify();
}
/**
* Verify that the ActiveClientTokenManager reschedules the authorization task.
*/
@SuppressWarnings("unchecked")
@Test
void getAuthorizationResultsRetriableError() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final AmqpException error = new AmqpException(true, AmqpErrorCondition.ARGUMENT_ERROR,
"Retryable argument error", new AmqpErrorContext("Test-context-namespace"));
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION), Mono.error(error),
getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.thenAwait(DEFAULT_DURATION)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenCancel()
.verify();
}
/**
* Verify that the ActiveClientTokenManager does not get more authorization tasks.
*/
@SuppressWarnings("unchecked")
@Test
void getAuthorizationResultsNonRetriableError() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final AmqpException error = new AmqpException(false, AmqpErrorCondition.TIMEOUT_ERROR, "Test CBS node error.",
new AmqpErrorContext("Test-context-namespace"));
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION), Mono.error(error),
getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.expectErrorSatisfies(throwable -> {
Assertions.assertTrue(throwable instanceof AmqpException);
Assertions.assertFalse(((AmqpException)throwable).isTransient());
})
.verify();
}
private Mono<OffsetDateTime> getNextExpiration(Duration duration) {
return Mono.fromCallable(() -> OffsetDateTime.now(ZoneOffset.UTC).plus(duration));
}
} | class ActiveClientTokenManagerTest {
private static final String AUDIENCE = "an-audience-test";
private static final String SCOPES = "scopes-test";
private static final Duration DEFAULT_DURATION = Duration.ofSeconds(20);
@Mock
private ClaimsBasedSecurityNode cbsNode;
@BeforeAll
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@BeforeEach
void setup() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
void teardown() {
Mockito.framework().clearInlineMocks();
cbsNode = null;
}
/**
* Verify that we can get successes and errors from CBS node.
*/
@Test
void getAuthorizationResults() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenCancel()
.verify();
}
/**
* Verify that we can get successes and errors from CBS node. This un-retriable error will not allow it to be
* rescheduled.
*/
@Test
void getAuthorizationResultsSuccessFailure() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final IllegalArgumentException error = new IllegalArgumentException("Some error");
final Duration expiryDuration = Duration.ofSeconds(20);
final AtomicInteger invocations = new AtomicInteger();
when(cbsNode.authorize(any(), any())).thenAnswer(invocationOnMock -> {
if (invocations.incrementAndGet() < 3) {
return Mono.just(OffsetDateTime.now(ZoneOffset.UTC).plusSeconds(20));
} else {
return Mono.error(error);
}
});
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(expiryDuration)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(expiryDuration)
.expectError(IllegalArgumentException.class)
.verifyThenAssertThat()
.hasNotDroppedElements()
.hasNotDroppedElements()
.hasNotDroppedErrors();
}
/**
* Verify that we cannot authorize with CBS node when it has already been disposed of.
*/
@Test
void cannotAuthorizeDisposedInstance() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION));
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
tokenManager.authorize().then(Mono.fromRunnable(tokenManager::close)).block();
StepVerifier.create(tokenManager.authorize())
.expectError(AzureException.class)
.verify();
}
/**
* Verify that the ActiveClientTokenManager reschedules the authorization task.
*/
@SuppressWarnings("unchecked")
@Test
void getAuthorizationResultsRetriableError() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final AmqpException error = new AmqpException(true, AmqpErrorCondition.ARGUMENT_ERROR,
"Retryable argument error", new AmqpErrorContext("Test-context-namespace"));
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION), Mono.error(error),
getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.thenAwait(DEFAULT_DURATION)
.expectNext(AmqpResponseCode.ACCEPTED)
.thenCancel()
.verify();
}
/**
* Verify that the ActiveClientTokenManager does not get more authorization tasks.
*/
@SuppressWarnings("unchecked")
@Test
void getAuthorizationResultsNonRetriableError() {
final Mono<ClaimsBasedSecurityNode> cbsNodeMono = Mono.fromCallable(() -> cbsNode);
final AmqpException error = new AmqpException(false, AmqpErrorCondition.TIMEOUT_ERROR, "Test CBS node error.",
new AmqpErrorContext("Test-context-namespace"));
when(cbsNode.authorize(any(), any())).thenReturn(getNextExpiration(DEFAULT_DURATION), Mono.error(error),
getNextExpiration(DEFAULT_DURATION));
StepVerifier.withVirtualTime(() -> {
final ActiveClientTokenManager tokenManager = new ActiveClientTokenManager(cbsNodeMono, AUDIENCE, SCOPES);
return tokenManager.authorize().thenMany(tokenManager.getAuthorizationResults());
})
.expectNext(AmqpResponseCode.ACCEPTED)
.thenAwait(DEFAULT_DURATION)
.expectErrorSatisfies(throwable -> {
Assertions.assertTrue(throwable instanceof AmqpException);
Assertions.assertFalse(((AmqpException) throwable).isTransient());
})
.verify();
}
private Mono<OffsetDateTime> getNextExpiration(Duration duration) {
return Mono.fromCallable(() -> OffsetDateTime.now(ZoneOffset.UTC).plus(duration));
}
} |
It would be better to have a null check in all these retryPolicy setters. | public KeyClientBuilder retryPolicy(RetryPolicy retryPolicy) {
this.retryPolicy = retryPolicy;
return this;
} | this.retryPolicy = retryPolicy; | public KeyClientBuilder retryPolicy(RetryPolicy retryPolicy) {
Objects.requireNonNull(retryPolicy, "The retry policy cannot be bull");
this.retryPolicy = retryPolicy;
return this;
} | class KeyClientBuilder {
private final ClientLogger logger = new ClientLogger(KeyClientBuilder.class);
private static final String AZURE_KEY_VAULT_KEYS = "azure-key-vault-keys.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private final List<HttpPipelinePolicy> policies;
private final Map<String, String> properties;
private TokenCredential credential;
private HttpPipeline pipeline;
private URL vaultUrl;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private RetryPolicy retryPolicy;
private Configuration configuration;
private KeyServiceVersion version;
/**
* The constructor with defaults.
*/
public KeyClientBuilder() {
retryPolicy = new RetryPolicy();
httpLogOptions = new HttpLogOptions();
policies = new ArrayList<>();
properties = CoreUtils.getProperties(AZURE_KEY_VAULT_KEYS);
}
/**
* Creates a {@link KeyClient} based on options set in the builder.
* Every time {@code buildClient()} is called, a new instance of {@link KeyClient} is created.
*
* <p>If {@link KeyClientBuilder
* {@link KeyClientBuilder
* All other builder settings are ignored. If {@code pipeline} is not set, then {@link
* KeyClientBuilder
* KeyClientBuilder
*
* @return A {@link KeyClient} with the options set from the builder.
* @throws IllegalStateException If {@link KeyClientBuilder
* {@link KeyClientBuilder
*/
public KeyClient buildClient() {
return new KeyClient(buildAsyncClient());
}
/**
* Creates a {@link KeyAsyncClient} based on options set in the builder.
* Every time {@code buildAsyncClient()} is called, a new instance of {@link KeyAsyncClient} is created.
*
* <p>If {@link KeyClientBuilder
* {@link KeyClientBuilder
* All other builder settings are ignored. If {@code pipeline} is not set, then {@link
* KeyClientBuilder
* key vault url are required to build the {@link KeyAsyncClient client}.}</p>
*
* @return A {@link KeyAsyncClient} with the options set from the builder.
* @throws IllegalStateException If {@link KeyClientBuilder
* {@link KeyClientBuilder
*/
public KeyAsyncClient buildAsyncClient() {
Configuration buildConfiguration =
(configuration == null) ? Configuration.getGlobalConfiguration().clone() : configuration;
URL buildEndpoint = getBuildEndpoint(buildConfiguration);
if (buildEndpoint == null) {
throw logger
.logExceptionAsError(new IllegalStateException(KeyVaultErrorCodeStrings
.getErrorString(KeyVaultErrorCodeStrings.VAULT_END_POINT_REQUIRED)));
}
KeyServiceVersion serviceVersion = version != null ? version : KeyServiceVersion.getLatest();
if (pipeline != null) {
return new KeyAsyncClient(vaultUrl, pipeline, serviceVersion);
}
if (credential == null) {
throw logger.logExceptionAsError(
new IllegalStateException(KeyVaultErrorCodeStrings
.getErrorString(KeyVaultErrorCodeStrings.CREDENTIAL_REQUIRED)));
}
final List<HttpPipelinePolicy> policies = new ArrayList<>();
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
policies.add(new UserAgentPolicy(httpLogOptions.getApplicationId(), clientName, clientVersion,
buildConfiguration));
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(retryPolicy);
policies.add(new KeyVaultCredentialPolicy(credential));
policies.addAll(this.policies);
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.httpClient(httpClient)
.build();
return new KeyAsyncClient(vaultUrl, pipeline, serviceVersion);
}
/**
* Sets the vault url to send HTTP requests to.
*
* @param vaultUrl The vault url is used as destination on Azure to send requests to.
* @return the updated ServiceClientBuilder object.
* @throws IllegalArgumentException if {@code vaultUrl} is null or it cannot be parsed into a valid URL.
*/
public KeyClientBuilder vaultUrl(String vaultUrl) {
try {
this.vaultUrl = new URL(vaultUrl);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"The Azure Key Vault url is malformed.", ex));
}
return this;
}
/**
* Sets the credential to use when authenticating HTTP requests.
*
* @param credential The credential to use for authenticating HTTP requests.
* @return the updated {@link KeyClientBuilder} object.
* @throws NullPointerException if {@code credential} is {@code null}.
*/
public KeyClientBuilder credential(TokenCredential credential) {
Objects.requireNonNull(credential);
this.credential = credential;
return this;
}
/**
* Sets the logging configuration for HTTP requests and responses.
*
* <p> If logLevel is not provided, default value of {@link HttpLogDetailLevel
*
* @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return the updated {@link KeyClientBuilder} object.
*/
public KeyClientBuilder httpLogOptions(HttpLogOptions logOptions) {
httpLogOptions = logOptions;
return this;
}
/**
* Adds a policy to the set of existing policies that are executed after {@link KeyAsyncClient} and {@link
* KeyClient} required policies.
*
* @param policy The {@link HttpPipelinePolicy policy} to be added.
* @return the updated {@link KeyClientBuilder} object.
* @throws NullPointerException if {@code policy} is {@code null}.
*/
public KeyClientBuilder addPolicy(HttpPipelinePolicy policy) {
Objects.requireNonNull(policy);
policies.add(policy);
return this;
}
/**
* Sets the HTTP client to use for sending and receiving requests to and from the service.
*
* @param client The HTTP client to use for requests.
* @return the updated {@link KeyClientBuilder} object.
* @throws NullPointerException If {@code client} is {@code null}.
*/
public KeyClientBuilder httpClient(HttpClient client) {
Objects.requireNonNull(client);
this.httpClient = client;
return this;
}
/**
* Sets the HTTP pipeline to use for the service client.
*
* If {@code pipeline} is set, all other settings are ignored, aside from
* {@link KeyClientBuilder
*
* @param pipeline The HTTP pipeline to use for sending service requests and receiving responses.
* @return the updated {@link KeyClientBuilder} object.
*/
public KeyClientBuilder pipeline(HttpPipeline pipeline) {
Objects.requireNonNull(pipeline);
this.pipeline = pipeline;
return this;
}
    /**
     * Sets the {@link KeyServiceVersion} that is used when making API requests.
     * <p>
     * If a service version is not provided, the latest service version known to this client library is used.
     * Consequently, upgrading the library may implicitly move requests onto a newer service version.
     *
     * @param version {@link KeyServiceVersion} of the service to be used when making requests.
     * @return The updated KeyClientBuilder object.
     */
    public KeyClientBuilder serviceVersion(KeyServiceVersion version) {
        this.version = version;
        return this;
    }
    /**
     * Sets the configuration store that is used during construction of the service client.
     *
     * <p>When unset, {@code buildAsyncClient()} falls back to a clone of the global {@link Configuration}
     * store. The store may also supply the vault endpoint via the {@code AZURE_KEYVAULT_ENDPOINT} key when
     * no vault URL is set explicitly.</p>
     *
     * @param configuration The configuration store used to retrieve environment-provided settings.
     * @return The updated KeyClientBuilder object.
     */
    public KeyClientBuilder configuration(Configuration configuration) {
        this.configuration = configuration;
        return this;
    }
    /**
     * Resolves the vault endpoint to use at build time.
     *
     * <p>NOTE(review): the javadoc previously attached here described a {@code retryPolicy} setter and did
     * not match this method; it has been rewritten. An explicitly configured vault URL always wins;
     * otherwise the {@code AZURE_KEYVAULT_ENDPOINT} configuration value is tried. A missing or malformed
     * configured value yields {@code null}, which the caller reports as a missing endpoint.</p>
     *
     * @param configuration The configuration store consulted when no vault URL was set on the builder.
     * @return The endpoint URL, or {@code null} if none could be resolved.
     */
    private URL getBuildEndpoint(Configuration configuration) {
        if (vaultUrl != null) {
            return vaultUrl;
        }
        String configEndpoint = configuration.get("AZURE_KEYVAULT_ENDPOINT");
        if (CoreUtils.isNullOrEmpty(configEndpoint)) {
            return null;
        }
        try {
            return new URL(configEndpoint);
        } catch (MalformedURLException ex) {
            // Invalid configuration values are treated the same as absent ones.
            return null;
        }
    }
} | class KeyClientBuilder {
    private final ClientLogger logger = new ClientLogger(KeyClientBuilder.class);
    // Properties file and keys used to stamp the SDK name/version into the User-Agent header.
    private static final String AZURE_KEY_VAULT_KEYS = "azure-key-vault-keys.properties";
    private static final String SDK_NAME = "name";
    private static final String SDK_VERSION = "version";
    // Additional user-supplied policies, appended after the retry/credential policies at build time.
    private final List<HttpPipelinePolicy> policies;
    private final Map<String, String> properties;
    // Builder state; either a pipeline or a credential must be set before building.
    private TokenCredential credential;
    private HttpPipeline pipeline;
    private URL vaultUrl;
    private HttpClient httpClient;
    private HttpLogOptions httpLogOptions;
    private RetryPolicy retryPolicy;
    private Configuration configuration;
    private KeyServiceVersion version;
    /**
     * The constructor with defaults.
     */
    public KeyClientBuilder() {
        retryPolicy = new RetryPolicy();
        httpLogOptions = new HttpLogOptions();
        policies = new ArrayList<>();
        // Loads SDK name/version metadata consumed by the User-Agent policy.
        properties = CoreUtils.getProperties(AZURE_KEY_VAULT_KEYS);
    }
    /**
     * Creates a {@link KeyClient} based on options set in the builder.
     * Every time {@code buildClient()} is called, a new instance of {@link KeyClient} is created.
     *
     * <p>The synchronous client wraps a freshly built asynchronous client, so the same build-time
     * validation applies: an endpoint must be resolvable and either a pipeline or a credential must
     * have been configured.</p>
     *
     * @return A {@link KeyClient} with the options set from the builder.
     * @throws IllegalStateException If no vault endpoint is configured, or neither a pipeline nor a
     * credential has been set.
     */
    public KeyClient buildClient() {
        return new KeyClient(buildAsyncClient());
    }
/**
* Creates a {@link KeyAsyncClient} based on options set in the builder.
* Every time {@code buildAsyncClient()} is called, a new instance of {@link KeyAsyncClient} is created.
*
* <p>If {@link KeyClientBuilder
* {@link KeyClientBuilder
* All other builder settings are ignored. If {@code pipeline} is not set, then {@link
* KeyClientBuilder
* key vault url are required to build the {@link KeyAsyncClient client}.}</p>
*
* @return A {@link KeyAsyncClient} with the options set from the builder.
* @throws IllegalStateException If {@link KeyClientBuilder
* {@link KeyClientBuilder
*/
public KeyAsyncClient buildAsyncClient() {
Configuration buildConfiguration =
(configuration == null) ? Configuration.getGlobalConfiguration().clone() : configuration;
URL buildEndpoint = getBuildEndpoint(buildConfiguration);
if (buildEndpoint == null) {
throw logger
.logExceptionAsError(new IllegalStateException(KeyVaultErrorCodeStrings
.getErrorString(KeyVaultErrorCodeStrings.VAULT_END_POINT_REQUIRED)));
}
KeyServiceVersion serviceVersion = version != null ? version : KeyServiceVersion.getLatest();
if (pipeline != null) {
return new KeyAsyncClient(vaultUrl, pipeline, serviceVersion);
}
if (credential == null) {
throw logger.logExceptionAsError(
new IllegalStateException(KeyVaultErrorCodeStrings
.getErrorString(KeyVaultErrorCodeStrings.CREDENTIAL_REQUIRED)));
}
final List<HttpPipelinePolicy> policies = new ArrayList<>();
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
policies.add(new UserAgentPolicy(httpLogOptions.getApplicationId(), clientName, clientVersion,
buildConfiguration));
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(retryPolicy);
policies.add(new KeyVaultCredentialPolicy(credential));
policies.addAll(this.policies);
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.httpClient(httpClient)
.build();
return new KeyAsyncClient(vaultUrl, pipeline, serviceVersion);
}
/**
* Sets the vault url to send HTTP requests to.
*
* @param vaultUrl The vault url is used as destination on Azure to send requests to.
* @return the updated ServiceClientBuilder object.
* @throws IllegalArgumentException if {@code vaultUrl} is null or it cannot be parsed into a valid URL.
*/
public KeyClientBuilder vaultUrl(String vaultUrl) {
try {
this.vaultUrl = new URL(vaultUrl);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"The Azure Key Vault url is malformed.", ex));
}
return this;
}
/**
* Sets the credential to use when authenticating HTTP requests.
*
* @param credential The credential to use for authenticating HTTP requests.
* @return the updated {@link KeyClientBuilder} object.
* @throws NullPointerException if {@code credential} is {@code null}.
*/
public KeyClientBuilder credential(TokenCredential credential) {
Objects.requireNonNull(credential);
this.credential = credential;
return this;
}
/**
* Sets the logging configuration for HTTP requests and responses.
*
* <p> If logLevel is not provided, default value of {@link HttpLogDetailLevel
*
* @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return the updated {@link KeyClientBuilder} object.
*/
public KeyClientBuilder httpLogOptions(HttpLogOptions logOptions) {
httpLogOptions = logOptions;
return this;
}
/**
* Adds a policy to the set of existing policies that are executed after {@link KeyAsyncClient} and {@link
* KeyClient} required policies.
*
* @param policy The {@link HttpPipelinePolicy policy} to be added.
* @return the updated {@link KeyClientBuilder} object.
* @throws NullPointerException if {@code policy} is {@code null}.
*/
public KeyClientBuilder addPolicy(HttpPipelinePolicy policy) {
Objects.requireNonNull(policy);
policies.add(policy);
return this;
}
/**
* Sets the HTTP client to use for sending and receiving requests to and from the service.
*
* @param client The HTTP client to use for requests.
* @return the updated {@link KeyClientBuilder} object.
* @throws NullPointerException If {@code client} is {@code null}.
*/
public KeyClientBuilder httpClient(HttpClient client) {
Objects.requireNonNull(client);
this.httpClient = client;
return this;
}
/**
* Sets the HTTP pipeline to use for the service client.
*
* If {@code pipeline} is set, all other settings are ignored, aside from
* {@link KeyClientBuilder
*
* @param pipeline The HTTP pipeline to use for sending service requests and receiving responses.
* @return the updated {@link KeyClientBuilder} object.
*/
public KeyClientBuilder pipeline(HttpPipeline pipeline) {
Objects.requireNonNull(pipeline);
this.pipeline = pipeline;
return this;
}
/**
* Sets the {@link KeyServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version the client library will have the result of potentially moving to a newer service version.
*
* @param version {@link KeyServiceVersion} of the service to be used when making requests.
* @return The updated KeyClientBuilder object.
*/
public KeyClientBuilder serviceVersion(KeyServiceVersion version) {
this.version = version;
return this;
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated KeyClientBuilder object.
*/
public KeyClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
    /**
     * Resolves the vault endpoint to use at build time.
     *
     * <p>NOTE(review): the javadoc previously attached here described a {@code retryPolicy} setter and did
     * not match this method; it has been rewritten. An explicitly configured vault URL always wins;
     * otherwise the {@code AZURE_KEYVAULT_ENDPOINT} configuration value is tried. A missing or malformed
     * configured value yields {@code null}, which the caller reports as a missing endpoint.</p>
     *
     * @param configuration The configuration store consulted when no vault URL was set on the builder.
     * @return The endpoint URL, or {@code null} if none could be resolved.
     */
    private URL getBuildEndpoint(Configuration configuration) {
        if (vaultUrl != null) {
            return vaultUrl;
        }
        String configEndpoint = configuration.get("AZURE_KEYVAULT_ENDPOINT");
        if (CoreUtils.isNullOrEmpty(configEndpoint)) {
            return null;
        }
        try {
            return new URL(configEndpoint);
        } catch (MalformedURLException ex) {
            // Invalid configuration values are treated the same as absent ones.
            return null;
        }
    }
} |
The options don't seem to be propagated here. | public ServiceBusReceiverAsyncClient buildAsyncClient() {
final MessagingEntityType entityType = validateEntityPaths(logger, connectionStringEntityName, topicName,
queueName);
final String entityPath;
switch (entityType) {
case QUEUE:
entityPath = queueName;
break;
case TOPIC:
if (isNullOrEmpty(subscriptionName)) {
throw logger.logExceptionAsError(new IllegalStateException(String.format(
"topicName (%s) must have a subscriptionName associated with it.", topicName)));
}
entityPath = topicName;
break;
default:
throw logger.logExceptionAsError(
new IllegalArgumentException("Unknown entity type: " + entityType));
}
if (prefetchCount < 1) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
"prefetchCount (%s) cannot be less than 1.", prefetchCount)));
}
final MessageLockContainer messageLockContainer = new MessageLockContainer();
final ServiceBusConnectionProcessor connectionProcessor = getOrCreateConnectionProcessor(messageSerializer);
final ReceiverOptions receiverOptions = new ReceiverOptions(receiveMode, prefetchCount);
return new ServiceBusReceiverAsyncClient(connectionProcessor.getFullyQualifiedNamespace(), entityPath,
entityType, receiverOptions, connectionProcessor,
tracerProvider, messageSerializer, messageLockContainer, ServiceBusClientBuilder.this::onClientClose);
} | final ReceiverOptions receiverOptions = new ReceiverOptions(receiveMode, prefetchCount); | public ServiceBusReceiverAsyncClient buildAsyncClient() {
final MessagingEntityType entityType = validateEntityPaths(logger, connectionStringEntityName, topicName,
queueName);
final String entityPath;
switch (entityType) {
case QUEUE:
entityPath = queueName;
break;
case TOPIC:
if (isNullOrEmpty(subscriptionName)) {
throw logger.logExceptionAsError(new IllegalStateException(String.format(
"topicName (%s) must have a subscriptionName associated with it.", topicName)));
}
entityPath = topicName;
break;
default:
throw logger.logExceptionAsError(
new IllegalArgumentException("Unknown entity type: " + entityType));
}
if (prefetchCount < 1) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
"prefetchCount (%s) cannot be less than 1.", prefetchCount)));
}
final MessageLockContainer messageLockContainer = new MessageLockContainer();
final ServiceBusConnectionProcessor connectionProcessor = getOrCreateConnectionProcessor(messageSerializer);
final ReceiverOptions receiverOptions = new ReceiverOptions(receiveMode, prefetchCount,
sessionId);
return new ServiceBusReceiverAsyncClient(connectionProcessor.getFullyQualifiedNamespace(), entityPath,
entityType, receiverOptions, connectionProcessor,
tracerProvider, messageSerializer, messageLockContainer, ServiceBusClientBuilder.this::onClientClose);
} | class ServiceBusReceiverClientBuilder {
private static final int DEFAULT_PREFETCH_COUNT = 1;
private int prefetchCount = DEFAULT_PREFETCH_COUNT;
private String queueName;
private String subscriptionName;
private String topicName;
private String sessionId;
private ReceiveMode receiveMode = ReceiveMode.PEEK_LOCK;
private ServiceBusReceiverClientBuilder() {
}
/**
* Sets the prefetch count of the receiver. Prefetch speeds up the message flow by aiming to have a message
* readily available for local retrieval when and before the application asks for one using {@link
* ServiceBusReceiverAsyncClient
* Setting the value to zero turns prefetch off. For both {@link ReceiveMode
* ReceiveMode
*
* @param prefetchCount The prefetch count.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder prefetchCount(int prefetchCount) {
this.prefetchCount = prefetchCount;
return this;
}
/**
* Sets the name of the queue to create a receiver for.
*
* @param queueName Name of the queue.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder queueName(String queueName) {
this.queueName = queueName;
return this;
}
/**
* Sets the receive mode for the receiver.
*
* @param receiveMode Mode for receiving messages.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder receiveMode(ReceiveMode receiveMode) {
this.receiveMode = receiveMode;
return this;
}
/**
* Sets the name of the subscription in the topic to listen to.
*
* @param subscriptionName Name of the subscription.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
* @see
*/
public ServiceBusReceiverClientBuilder subscriptionName(String subscriptionName) {
this.subscriptionName = subscriptionName;
return this;
}
/**
* Sets the name of the topic.
*
* @param topicName Name of the topic.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
* @see
*/
public ServiceBusReceiverClientBuilder topicName(String topicName) {
this.topicName = topicName;
return this;
}
/**
* Sets the session id.
*
* @param sessionId session id.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder sessionId(String sessionId) {
this.sessionId = sessionId;
return this;
}
/**
* Creates an <b>asynchronous</b> Service Bus receiver responsible for reading {@link ServiceBusMessage
* messages} from a specific queue or topic.
*
* @return An new {@link ServiceBusReceiverAsyncClient} that receives messages from a queue or topic.
* @throws IllegalStateException if {@link
* topicName} are not set or, both of these fields are set. It is also thrown if the Service Bus {@link
*
* {@link
*
* @throws IllegalArgumentException Queue or topic name are not set via {@link
* queueName()} or {@link
*/
/**
* Creates <b>synchronous</b> Service Bus receiver responsible for reading {@link ServiceBusMessage messages}
* from a specific queue or topic.
*
* @return An new {@link ServiceBusReceiverClient} that receives messages from a queue or topic.
* @throws IllegalStateException if {@link
* topicName} are not set or, both of these fields are set. It is also thrown if the Service Bus {@link
*
* {@link
*
* @throws IllegalArgumentException Queue or topic name are not set via {@link
* queueName()} or {@link
*/
public ServiceBusReceiverClient buildClient() {
return new ServiceBusReceiverClient(buildAsyncClient(), retryOptions.getTryTimeout());
}
} | class ServiceBusReceiverClientBuilder {
private static final int DEFAULT_PREFETCH_COUNT = 1;
private int prefetchCount = DEFAULT_PREFETCH_COUNT;
private String queueName;
private String subscriptionName;
private String topicName;
private String sessionId;
private ReceiveMode receiveMode = ReceiveMode.PEEK_LOCK;
private ServiceBusReceiverClientBuilder() {
}
/**
* Sets the prefetch count of the receiver. Prefetch speeds up the message flow by aiming to have a message
* readily available for local retrieval when and before the application asks for one using {@link
* ServiceBusReceiverAsyncClient
* Setting the value to zero turns prefetch off. For both {@link ReceiveMode
* ReceiveMode
*
* @param prefetchCount The prefetch count.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder prefetchCount(int prefetchCount) {
this.prefetchCount = prefetchCount;
return this;
}
/**
* Sets the name of the queue to create a receiver for.
*
* @param queueName Name of the queue.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder queueName(String queueName) {
this.queueName = queueName;
return this;
}
/**
* Sets the receive mode for the receiver.
*
* @param receiveMode Mode for receiving messages.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder receiveMode(ReceiveMode receiveMode) {
this.receiveMode = receiveMode;
return this;
}
/**
* Sets the name of the subscription in the topic to listen to.
*
* @param subscriptionName Name of the subscription.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
* @see
*/
public ServiceBusReceiverClientBuilder subscriptionName(String subscriptionName) {
this.subscriptionName = subscriptionName;
return this;
}
/**
* Sets the name of the topic.
*
* @param topicName Name of the topic.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
* @see
*/
public ServiceBusReceiverClientBuilder topicName(String topicName) {
this.topicName = topicName;
return this;
}
/**
* Sets the session id.
*
* @param sessionId session id.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder sessionId(String sessionId) {
this.sessionId = sessionId;
return this;
}
/**
* Creates an <b>asynchronous</b> Service Bus receiver responsible for reading {@link ServiceBusMessage
* messages} from a specific queue or topic.
*
* @return An new {@link ServiceBusReceiverAsyncClient} that receives messages from a queue or topic.
* @throws IllegalStateException if {@link
* topicName} are not set or, both of these fields are set. It is also thrown if the Service Bus {@link
*
* {@link
*
* @throws IllegalArgumentException Queue or topic name are not set via {@link
* queueName()} or {@link
*/
/**
* Creates <b>synchronous</b> Service Bus receiver responsible for reading {@link ServiceBusMessage messages}
* from a specific queue or topic.
*
* @return An new {@link ServiceBusReceiverClient} that receives messages from a queue or topic.
* @throws IllegalStateException if {@link
* topicName} are not set or, both of these fields are set. It is also thrown if the Service Bus {@link
*
* {@link
*
* @throws IllegalArgumentException Queue or topic name are not set via {@link
* queueName()} or {@link
*/
public ServiceBusReceiverClient buildClient() {
return new ServiceBusReceiverClient(buildAsyncClient(), retryOptions.getTryTimeout());
}
} |
Why is it always false? If later on, you're inferring it from the presence of a `sessionId`, the "enableSession" isn't required. | public ServiceBusReceiverAsyncClient buildAsyncClient() {
final MessagingEntityType entityType = validateEntityPaths(logger, connectionStringEntityName, topicName,
queueName);
final String entityPath;
switch (entityType) {
case QUEUE:
entityPath = queueName;
break;
case TOPIC:
if (isNullOrEmpty(subscriptionName)) {
throw logger.logExceptionAsError(new IllegalStateException(String.format(
"topicName (%s) must have a subscriptionName associated with it.", topicName)));
}
entityPath = topicName;
break;
default:
throw logger.logExceptionAsError(
new IllegalArgumentException("Unknown entity type: " + entityType));
}
if (prefetchCount < 1) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
"prefetchCount (%s) cannot be less than 1.", prefetchCount)));
}
final MessageLockContainer messageLockContainer = new MessageLockContainer();
final ServiceBusConnectionProcessor connectionProcessor = getOrCreateConnectionProcessor(messageSerializer);
final ReceiverOptions receiverOptions = new ReceiverOptions(receiveMode, prefetchCount,
false, sessionId);
return new ServiceBusReceiverAsyncClient(connectionProcessor.getFullyQualifiedNamespace(), entityPath,
entityType, receiverOptions, connectionProcessor,
tracerProvider, messageSerializer, messageLockContainer, ServiceBusClientBuilder.this::onClientClose);
} | false, sessionId); | public ServiceBusReceiverAsyncClient buildAsyncClient() {
final MessagingEntityType entityType = validateEntityPaths(logger, connectionStringEntityName, topicName,
queueName);
final String entityPath;
switch (entityType) {
case QUEUE:
entityPath = queueName;
break;
case TOPIC:
if (isNullOrEmpty(subscriptionName)) {
throw logger.logExceptionAsError(new IllegalStateException(String.format(
"topicName (%s) must have a subscriptionName associated with it.", topicName)));
}
entityPath = topicName;
break;
default:
throw logger.logExceptionAsError(
new IllegalArgumentException("Unknown entity type: " + entityType));
}
if (prefetchCount < 1) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
"prefetchCount (%s) cannot be less than 1.", prefetchCount)));
}
final MessageLockContainer messageLockContainer = new MessageLockContainer();
final ServiceBusConnectionProcessor connectionProcessor = getOrCreateConnectionProcessor(messageSerializer);
final ReceiverOptions receiverOptions = new ReceiverOptions(receiveMode, prefetchCount,
sessionId);
return new ServiceBusReceiverAsyncClient(connectionProcessor.getFullyQualifiedNamespace(), entityPath,
entityType, receiverOptions, connectionProcessor,
tracerProvider, messageSerializer, messageLockContainer, ServiceBusClientBuilder.this::onClientClose);
} | class ServiceBusReceiverClientBuilder {
private static final int DEFAULT_PREFETCH_COUNT = 1;
private int prefetchCount = DEFAULT_PREFETCH_COUNT;
private String queueName;
private String subscriptionName;
private String topicName;
private String sessionId;
private ReceiveMode receiveMode = ReceiveMode.PEEK_LOCK;
private ServiceBusReceiverClientBuilder() {
}
/**
* Sets the prefetch count of the receiver. Prefetch speeds up the message flow by aiming to have a message
* readily available for local retrieval when and before the application asks for one using {@link
* ServiceBusReceiverAsyncClient
* Setting the value to zero turns prefetch off. For both {@link ReceiveMode
* ReceiveMode
*
* @param prefetchCount The prefetch count.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder prefetchCount(int prefetchCount) {
this.prefetchCount = prefetchCount;
return this;
}
/**
* Sets the name of the queue to create a receiver for.
*
* @param queueName Name of the queue.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder queueName(String queueName) {
this.queueName = queueName;
return this;
}
/**
* Sets the receive mode for the receiver.
*
* @param receiveMode Mode for receiving messages.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder receiveMode(ReceiveMode receiveMode) {
this.receiveMode = receiveMode;
return this;
}
/**
* Sets the name of the subscription in the topic to listen to.
*
* @param subscriptionName Name of the subscription.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
* @see
*/
public ServiceBusReceiverClientBuilder subscriptionName(String subscriptionName) {
this.subscriptionName = subscriptionName;
return this;
}
/**
* Sets the name of the topic.
*
* @param topicName Name of the topic.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
* @see
*/
public ServiceBusReceiverClientBuilder topicName(String topicName) {
this.topicName = topicName;
return this;
}
/**
* Sets the session id.
*
* @param sessionId session id.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder sessionId(String sessionId) {
this.sessionId = sessionId;
return this;
}
/**
* Creates an <b>asynchronous</b> Service Bus receiver responsible for reading {@link ServiceBusMessage
* messages} from a specific queue or topic.
*
* @return An new {@link ServiceBusReceiverAsyncClient} that receives messages from a queue or topic.
* @throws IllegalStateException if {@link
* topicName} are not set or, both of these fields are set. It is also thrown if the Service Bus {@link
*
* {@link
*
* @throws IllegalArgumentException Queue or topic name are not set via {@link
* queueName()} or {@link
*/
/**
* Creates <b>synchronous</b> Service Bus receiver responsible for reading {@link ServiceBusMessage messages}
* from a specific queue or topic.
*
* @return An new {@link ServiceBusReceiverClient} that receives messages from a queue or topic.
* @throws IllegalStateException if {@link
* topicName} are not set or, both of these fields are set. It is also thrown if the Service Bus {@link
*
* {@link
*
* @throws IllegalArgumentException Queue or topic name are not set via {@link
* queueName()} or {@link
*/
public ServiceBusReceiverClient buildClient() {
return new ServiceBusReceiverClient(buildAsyncClient(), retryOptions.getTryTimeout());
}
} | class ServiceBusReceiverClientBuilder {
private static final int DEFAULT_PREFETCH_COUNT = 1;
private int prefetchCount = DEFAULT_PREFETCH_COUNT;
private String queueName;
private String subscriptionName;
private String topicName;
private String sessionId;
private ReceiveMode receiveMode = ReceiveMode.PEEK_LOCK;
private ServiceBusReceiverClientBuilder() {
}
/**
* Sets the prefetch count of the receiver. Prefetch speeds up the message flow by aiming to have a message
* readily available for local retrieval when and before the application asks for one using {@link
* ServiceBusReceiverAsyncClient
* Setting the value to zero turns prefetch off. For both {@link ReceiveMode
* ReceiveMode
*
* @param prefetchCount The prefetch count.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder prefetchCount(int prefetchCount) {
this.prefetchCount = prefetchCount;
return this;
}
/**
* Sets the name of the queue to create a receiver for.
*
* @param queueName Name of the queue.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder queueName(String queueName) {
this.queueName = queueName;
return this;
}
/**
* Sets the receive mode for the receiver.
*
* @param receiveMode Mode for receiving messages.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder receiveMode(ReceiveMode receiveMode) {
this.receiveMode = receiveMode;
return this;
}
/**
* Sets the name of the subscription in the topic to listen to.
*
* @param subscriptionName Name of the subscription.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
* @see
*/
public ServiceBusReceiverClientBuilder subscriptionName(String subscriptionName) {
this.subscriptionName = subscriptionName;
return this;
}
/**
* Sets the name of the topic.
*
* @param topicName Name of the topic.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
* @see
*/
public ServiceBusReceiverClientBuilder topicName(String topicName) {
this.topicName = topicName;
return this;
}
/**
* Sets the session id.
*
* @param sessionId session id.
*
* @return The modified {@link ServiceBusReceiverClientBuilder} object.
*/
public ServiceBusReceiverClientBuilder sessionId(String sessionId) {
this.sessionId = sessionId;
return this;
}
/**
* Creates an <b>asynchronous</b> Service Bus receiver responsible for reading {@link ServiceBusMessage
* messages} from a specific queue or topic.
*
* @return An new {@link ServiceBusReceiverAsyncClient} that receives messages from a queue or topic.
* @throws IllegalStateException if {@link
* topicName} are not set or, both of these fields are set. It is also thrown if the Service Bus {@link
*
* {@link
*
* @throws IllegalArgumentException Queue or topic name are not set via {@link
* queueName()} or {@link
*/
/**
* Creates <b>synchronous</b> Service Bus receiver responsible for reading {@link ServiceBusMessage messages}
* from a specific queue or topic.
*
* @return An new {@link ServiceBusReceiverClient} that receives messages from a queue or topic.
* @throws IllegalStateException if {@link
* topicName} are not set or, both of these fields are set. It is also thrown if the Service Bus {@link
*
* {@link
*
* @throws IllegalArgumentException Queue or topic name are not set via {@link
* queueName()} or {@link
*/
public ServiceBusReceiverClient buildClient() {
return new ServiceBusReceiverClient(buildAsyncClient(), retryOptions.getTryTimeout());
}
} |
Should this exception be logged and rethrown through the client logger instead of Exceptions.propagate? | public static Flux<ByteBuffer> toFluxByteBuffer(InputStream inputStream) {
Pair pair = new Pair();
return Flux.just(true)
.repeat()
.map(ignore -> {
byte[] buffer = new byte[BYTE_BUFFER_CHUNK_SIZE];
try {
int numBytes = inputStream.read(buffer);
if (numBytes > 0) {
return pair.buffer(ByteBuffer.wrap(buffer, 0, numBytes)).readBytes(numBytes);
} else {
return pair.buffer(null).readBytes(numBytes);
}
} catch (IOException ioe) {
throw Exceptions.propagate(ioe);
}
})
.takeUntil(p -> p.readBytes() == -1)
.filter(p -> p.readBytes() > 0)
.map(Pair::buffer);
} | throw Exceptions.propagate(ioe); | public static Flux<ByteBuffer> toFluxByteBuffer(InputStream inputStream) {
Pair pair = new Pair();
return Flux.just(true)
.repeat()
.map(ignore -> {
byte[] buffer = new byte[BYTE_BUFFER_CHUNK_SIZE];
try {
int numBytes = inputStream.read(buffer);
if (numBytes > 0) {
return pair.buffer(ByteBuffer.wrap(buffer, 0, numBytes)).readBytes(numBytes);
} else {
return pair.buffer(null).readBytes(numBytes);
}
} catch (IOException ioe) {
throw LOGGER.logExceptionAsError(new RuntimeException(ioe));
}
})
.takeUntil(p -> p.readBytes() == -1)
.filter(p -> p.readBytes() > 0)
.map(Pair::buffer);
} | class Utility {
private static final ClientLogger LOGGER = new ClientLogger(Utility.class);
private static final int BYTE_BUFFER_CHUNK_SIZE = 4096;
// Private constructor: static helper class, never instantiated.
private Utility() {
}
/**
* Creates a Flux of ByteBuffer, with each ByteBuffer wrapping bytes read from the given
* InputStream.
*
* @param inputStream InputStream to back the Flux
* @return Flux of ByteBuffer backed by the InputStream
*/
/**
 * Mutable holder pairing the buffer produced by the most recent
 * InputStream.read with the byte count that read returned.
 */
private static class Pair {
// Most recent chunk wrapped from the stream; null when nothing was read.
private ByteBuffer byteBuffer;
// Result of the last InputStream.read call; -1 signals end of stream.
private int readBytes;
// Accessor for the last buffer (may be null when readBytes <= 0).
ByteBuffer buffer() {
return this.byteBuffer;
}
// Accessor for the last read count.
int readBytes() {
return this.readBytes;
}
// Fluent mutator; returns this so both fields can be set in one expression.
Pair buffer(ByteBuffer byteBuffer) {
this.byteBuffer = byteBuffer;
return this;
}
// Fluent mutator for the read count; returns this for chaining.
Pair readBytes(int cnt) {
this.readBytes = cnt;
return this;
}
}
} | class Utility {
private static final ClientLogger LOGGER = new ClientLogger(Utility.class);
private static final int BYTE_BUFFER_CHUNK_SIZE = 4096;
private Utility() {
}
/**
* Automatically detect byte buffer's content type.
*
* Given the source: <a href="https:
*
* @param buffer The byte buffer input.
*
* @return The {@link Mono} of {@link ContentType} content type.
*/
public static Mono<ContentType> detectContentType(Flux<ByteBuffer> buffer) {
    // First four bytes of the stream: enough to recognize the JPEG/PDF/PNG/TIFF magic numbers.
    byte[] header = new byte[4];
    // Number of header bytes collected so far (single-element array so the lambda can mutate it).
    int[] written = new int[] {0};
    // Detected type; stays null until a known magic number is matched.
    ContentType[] contentType = { null };
    return buffer.map(chunk -> {
        final int len = chunk.remaining();
        // NOTE(review): chunk.get(i) reads by absolute index while len is relative to the
        // buffer's position - assumes every chunk arrives with position 0; confirm.
        for (int i = 0; i < len; i++) {
            header[written[0]] = chunk.get(i);
            written[0]++;
            if (written[0] == 4) {
                if (isJpeg(header)) {
                    contentType[0] = ContentType.IMAGE_JPEG;
                } else if (isPdf(header)) {
                    contentType[0] = ContentType.APPLICATION_PDF;
                } else if (isPng(header)) {
                    contentType[0] = ContentType.IMAGE_PNG;
                } else if (isTiff(header)) {
                    contentType[0] = ContentType.IMAGE_TIFF;
                }
                // Four bytes collected: stop consuming the stream.
                return false;
            }
        }
        // Chunk exhausted before four header bytes were available; keep reading.
        return true;
    })
    .takeWhile(doContinue -> doContinue)
    .then(Mono.defer(() -> {
        if (contentType[0] != null) {
            return Mono.just(contentType[0]);
        } else {
            // BUGFIX: contentType[0] was previously seeded with ContentType.fromString("none"),
            // which made this branch unreachable and silently returned "none" for unknown
            // headers. An unrecognized (or too-short) stream now surfaces the documented error.
            return Mono.error(new RuntimeException("Content type could not be detected. "
                + "Should use other overload API that takes content type."));
        }
    }));
}
// --- Magic-number detectors: each inspects the leading bytes of the file header. ---

// JPEG files start with the two-byte SOI marker 0xFF 0xD8 (only two bytes are significant).
private static boolean isJpeg(byte[] header) {
    return header[0] == (byte) 0xff && header[1] == (byte) 0xd8;
}

// PDF files start with the ASCII signature "%PDF" (0x25 0x50 0x44 0x46).
private static boolean isPdf(byte[] header) {
    return matchesSignature(header, 0x25, 0x50, 0x44, 0x46);
}

// PNG files start with 0x89 'P' 'N' 'G'.
private static boolean isPng(byte[] header) {
    return matchesSignature(header, 0x89, 0x50, 0x4e, 0x47);
}

// TIFF files start with "II*\0" (little-endian) or "MM\0*" (big-endian).
private static boolean isTiff(byte[] header) {
    return matchesSignature(header, 0x49, 0x49, 0x2a, 0x00)
        || matchesSignature(header, 0x4d, 0x4d, 0x00, 0x2a);
}

// True when the first four bytes of header equal the given unsigned byte values.
private static boolean matchesSignature(byte[] header, int b0, int b1, int b2, int b3) {
    return header[0] == (byte) b0
        && header[1] == (byte) b1
        && header[2] == (byte) b2
        && header[3] == (byte) b3;
}
/**
* Creates a Flux of ByteBuffer, with each ByteBuffer wrapping bytes read from the given
* InputStream.
*
* @param inputStream InputStream to back the Flux
* @return Flux of ByteBuffer backed by the InputStream
*/
private static class Pair {
private ByteBuffer byteBuffer;
private int readBytes;
ByteBuffer buffer() {
return this.byteBuffer;
}
int readBytes() {
return this.readBytes;
}
Pair buffer(ByteBuffer byteBuffer) {
this.byteBuffer = byteBuffer;
return this;
}
Pair readBytes(int cnt) {
this.readBytes = cnt;
return this;
}
}
/**
* Extracts the result ID from the URL.
*
* @param operationLocation The URL specified in the 'Operation-Location' response header containing the
* resultId used to track the progress and obtain the result of the analyze operation.
*
* @return The resultId used to track the progress.
*/
public static String parseModelId(String operationLocation) {
    // The result id is everything after the final '/' of the Operation-Location URL.
    final int lastSlash =
        CoreUtils.isNullOrEmpty(operationLocation) ? -1 : operationLocation.lastIndexOf('/');
    if (lastSlash < 0) {
        // Null, empty, or slash-free input: nothing to extract.
        throw LOGGER.logExceptionAsError(
            new RuntimeException("Failed to parse operation header for result Id from: " + operationLocation));
    }
    return operationLocation.substring(lastSlash + 1);
}
} |
updated | public static Flux<ByteBuffer> toFluxByteBuffer(InputStream inputStream) {
Pair pair = new Pair();
return Flux.just(true)
.repeat()
.map(ignore -> {
byte[] buffer = new byte[BYTE_BUFFER_CHUNK_SIZE];
try {
int numBytes = inputStream.read(buffer);
if (numBytes > 0) {
return pair.buffer(ByteBuffer.wrap(buffer, 0, numBytes)).readBytes(numBytes);
} else {
return pair.buffer(null).readBytes(numBytes);
}
} catch (IOException ioe) {
throw Exceptions.propagate(ioe);
}
})
.takeUntil(p -> p.readBytes() == -1)
.filter(p -> p.readBytes() > 0)
.map(Pair::buffer);
} | throw Exceptions.propagate(ioe); | public static Flux<ByteBuffer> toFluxByteBuffer(InputStream inputStream) {
Pair pair = new Pair();
return Flux.just(true)
.repeat()
.map(ignore -> {
byte[] buffer = new byte[BYTE_BUFFER_CHUNK_SIZE];
try {
int numBytes = inputStream.read(buffer);
if (numBytes > 0) {
return pair.buffer(ByteBuffer.wrap(buffer, 0, numBytes)).readBytes(numBytes);
} else {
return pair.buffer(null).readBytes(numBytes);
}
} catch (IOException ioe) {
throw LOGGER.logExceptionAsError(new RuntimeException(ioe));
}
})
.takeUntil(p -> p.readBytes() == -1)
.filter(p -> p.readBytes() > 0)
.map(Pair::buffer);
} | class Utility {
private static final ClientLogger LOGGER = new ClientLogger(Utility.class);
private static final int BYTE_BUFFER_CHUNK_SIZE = 4096;
private Utility() {
}
/**
* Creates a Flux of ByteBuffer, with each ByteBuffer wrapping bytes read from the given
* InputStream.
*
* @param inputStream InputStream to back the Flux
* @return Flux of ByteBuffer backed by the InputStream
*/
private static class Pair {
private ByteBuffer byteBuffer;
private int readBytes;
ByteBuffer buffer() {
return this.byteBuffer;
}
int readBytes() {
return this.readBytes;
}
Pair buffer(ByteBuffer byteBuffer) {
this.byteBuffer = byteBuffer;
return this;
}
Pair readBytes(int cnt) {
this.readBytes = cnt;
return this;
}
}
} | class Utility {
private static final ClientLogger LOGGER = new ClientLogger(Utility.class);
private static final int BYTE_BUFFER_CHUNK_SIZE = 4096;
private Utility() {
}
/**
* Automatically detect byte buffer's content type.
*
* Given the source: <a href="https:
*
* @param buffer The byte buffer input.
*
* @return The {@link Mono} of {@link ContentType} content type.
*/
public static Mono<ContentType> detectContentType(Flux<ByteBuffer> buffer) {
    // First four bytes of the stream: enough to recognize the JPEG/PDF/PNG/TIFF magic numbers.
    byte[] header = new byte[4];
    // Number of header bytes collected so far (single-element array so the lambda can mutate it).
    int[] written = new int[] {0};
    // Detected type; stays null until a known magic number is matched.
    ContentType[] contentType = { null };
    return buffer.map(chunk -> {
        final int len = chunk.remaining();
        // NOTE(review): chunk.get(i) reads by absolute index while len is relative to the
        // buffer's position - assumes every chunk arrives with position 0; confirm.
        for (int i = 0; i < len; i++) {
            header[written[0]] = chunk.get(i);
            written[0]++;
            if (written[0] == 4) {
                if (isJpeg(header)) {
                    contentType[0] = ContentType.IMAGE_JPEG;
                } else if (isPdf(header)) {
                    contentType[0] = ContentType.APPLICATION_PDF;
                } else if (isPng(header)) {
                    contentType[0] = ContentType.IMAGE_PNG;
                } else if (isTiff(header)) {
                    contentType[0] = ContentType.IMAGE_TIFF;
                }
                // Four bytes collected: stop consuming the stream.
                return false;
            }
        }
        // Chunk exhausted before four header bytes were available; keep reading.
        return true;
    })
    .takeWhile(doContinue -> doContinue)
    .then(Mono.defer(() -> {
        if (contentType[0] != null) {
            return Mono.just(contentType[0]);
        } else {
            // BUGFIX: contentType[0] was previously seeded with ContentType.fromString("none"),
            // which made this branch unreachable and silently returned "none" for unknown
            // headers. An unrecognized (or too-short) stream now surfaces the documented error.
            return Mono.error(new RuntimeException("Content type could not be detected. "
                + "Should use other overload API that takes content type."));
        }
    }));
}
private static boolean isJpeg(byte[] header) {
return (header[0] == (byte) 0xff && header[1] == (byte) 0xd8);
}
private static boolean isPdf(byte[] header) {
return header[0] == (byte) 0x25
&& header[1] == (byte) 0x50
&& header[2] == (byte) 0x44
&& header[3] == (byte) 0x46;
}
private static boolean isPng(byte[] header) {
return header[0] == (byte) 0x89
&& header[1] == (byte) 0x50
&& header[2] == (byte) 0x4e
&& header[3] == (byte) 0x47;
}
private static boolean isTiff(byte[] header) {
return (header[0] == (byte) 0x49
&& header[1] == (byte) 0x49
&& header[2] == (byte) 0x2a
&& header[3] == (byte) 0x0)
|| (header[0] == (byte) 0x4d
&& header[1] == (byte) 0x4d
&& header[2] == (byte) 0x0
&& header[3] == (byte) 0x2a);
}
/**
* Creates a Flux of ByteBuffer, with each ByteBuffer wrapping bytes read from the given
* InputStream.
*
* @param inputStream InputStream to back the Flux
* @return Flux of ByteBuffer backed by the InputStream
*/
private static class Pair {
private ByteBuffer byteBuffer;
private int readBytes;
ByteBuffer buffer() {
return this.byteBuffer;
}
int readBytes() {
return this.readBytes;
}
Pair buffer(ByteBuffer byteBuffer) {
this.byteBuffer = byteBuffer;
return this;
}
Pair readBytes(int cnt) {
this.readBytes = cnt;
return this;
}
}
/**
* Extracts the result ID from the URL.
*
* @param operationLocation The URL specified in the 'Operation-Location' response header containing the
* resultId used to track the progress and obtain the result of the analyze operation.
*
* @return The resultId used to track the progress.
*/
public static String parseModelId(String operationLocation) {
if (!CoreUtils.isNullOrEmpty(operationLocation)) {
int lastIndex = operationLocation.lastIndexOf('/');
if (lastIndex != -1) {
return operationLocation.substring(lastIndex + 1);
}
}
throw LOGGER.logExceptionAsError(
new RuntimeException("Failed to parse operation header for result Id from: " + operationLocation));
}
} |
Just 'ModelInfo' (the imported type, without the fully qualified package name) should be good. | static CustomFormModel toCustomFormModel(Model modelResponse) {
com.azure.ai.formrecognizer.implementation.models.ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("Invalid status Model Id."));
}
List<FormRecognizerError> trainResultErrors = new ArrayList<>();
if (modelResponse.getTrainResult().getErrors() != null) {
trainResultErrors = setTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<TrainingDocumentInfo> trainingDocumentInfoList = new ArrayList<>();
modelResponse.getTrainResult().getTrainingDocuments().forEach(trainingDocumentItem -> {
List<FormRecognizerError> documentErrors = new ArrayList<>();
if (trainingDocumentItem.getErrors() != null) {
documentErrors = setTrainingErrors(trainingDocumentItem.getErrors());
}
TrainingDocumentInfo trainingDocumentInfo = new TrainingDocumentInfo(trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()), trainingDocumentItem.getPages(),
documentErrors);
trainingDocumentInfoList.add(trainingDocumentInfo);
});
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
CustomFormSubModel customFormSubModel = new CustomFormSubModel(null, fieldMap,
formType + clusterKey);
subModelList.add(customFormSubModel);
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap, formType + modelInfo.getModelId()));
}
return new CustomFormModel(modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
trainResultErrors, trainingDocumentInfoList);
} | } | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
// Private constructor: static transform helpers only, never instantiated.
private CustomModelTransforms() {
}
// Maps each service-side ErrorInformation onto the SDK-facing FormRecognizerError type.
private static List<FormRecognizerError> setTrainingErrors(List<ErrorInformation> trainingErrorList) {
    final List<FormRecognizerError> errors = new ArrayList<>();
    for (ErrorInformation errorInformation : trainingErrorList) {
        errors.add(new FormRecognizerError(errorInformation.getCode(), errorInformation.getMessage()));
    }
    return errors;
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
// Converts each service ModelInfo into the customer-facing CustomFormModelInfo.
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
    final List<CustomFormModelInfo> result = new ArrayList<>();
    for (ModelInfo modelInfo : modelInfoList) {
        result.add(new CustomFormModelInfo(
            modelInfo.getModelId().toString(),
            CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
            modelInfo.getCreatedDateTime(),
            modelInfo.getLastUpdatedDateTime()));
    }
    return result;
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
// Null or empty input collapses to an immutable empty list; otherwise every
// ErrorInformation is mapped onto a FormRecognizerError.
private static List<FormRecognizerError> transformTrainingErrors(
    List<ErrorInformation> trainingErrorList) {
    if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
        return Collections.emptyList();
    }
    return trainingErrorList.stream()
        .map(error -> new FormRecognizerError(error.getCode(), error.getMessage()))
        .collect(Collectors.toList());
}
} |
If the underlying polling results in an error state/invalid completion state what does this `IterableStream` end up being? | public void extractReceipt() {
String receiptSourceUrl = "https:
SyncPoller<OperationResult, IterableStream<RecognizedReceipt>> syncPoller =
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptSourceUrl);
IterableStream<RecognizedReceipt> receiptPageResults = syncPoller.getFinalResult();
receiptPageResults.forEach(recognizedReceipt -> {
USReceipt usReceipt = ReceiptExtensions.asUSReceipt(recognizedReceipt);
System.out.printf("Page Number: %s%n", usReceipt.getMerchantName().getPageNumber());
System.out.printf("Merchant Name %s%n", usReceipt.getMerchantName().getName());
System.out.printf("Merchant Name Value: %s%n", usReceipt.getMerchantName().getFieldValue());
System.out.printf("Merchant Address %s%n", usReceipt.getMerchantAddress().getName());
System.out.printf("Merchant Address Value: %s%n", usReceipt.getMerchantAddress().getFieldValue());
System.out.printf("Merchant Phone Number %s%n", usReceipt.getMerchantPhoneNumber().getName());
System.out.printf("Merchant Phone Number Value: %s%n", usReceipt.getMerchantPhoneNumber().getFieldValue());
System.out.printf("Total: %s%n", usReceipt.getTotal().getName());
System.out.printf("Total Value: %s%n", usReceipt.getTotal().getFieldValue());
});
} | IterableStream<RecognizedReceipt> receiptPageResults = syncPoller.getFinalResult(); | public void extractReceipt() {
String receiptSourceUrl = "https:
SyncPoller<OperationResult, IterableStream<RecognizedReceipt>> syncPoller =
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptSourceUrl);
IterableStream<RecognizedReceipt> receiptPageResults = syncPoller.getFinalResult();
receiptPageResults.forEach(recognizedReceipt -> {
USReceipt usReceipt = ReceiptExtensions.asUSReceipt(recognizedReceipt);
System.out.printf("Page Number: %s%n", usReceipt.getMerchantName().getPageNumber());
System.out.printf("Merchant Name %s%n", usReceipt.getMerchantName().getName());
System.out.printf("Merchant Name Value: %s%n", usReceipt.getMerchantName().getFieldValue());
System.out.printf("Merchant Address %s%n", usReceipt.getMerchantAddress().getName());
System.out.printf("Merchant Address Value: %s%n", usReceipt.getMerchantAddress().getFieldValue());
System.out.printf("Merchant Phone Number %s%n", usReceipt.getMerchantPhoneNumber().getName());
System.out.printf("Merchant Phone Number Value: %s%n", usReceipt.getMerchantPhoneNumber().getFieldValue());
System.out.printf("Total: %s%n", usReceipt.getTotal().getName());
System.out.printf("Total Value: %s%n", usReceipt.getTotal().getFieldValue());
});
} | class ReadmeSamples {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for configuring http client.
*/
public void configureHttpClient() {
HttpClient client = new NettyAsyncHttpClientBuilder()
.port(8080)
.wiretap(true)
.build();
}
/**
* Code snippet for getting sync client using the API key authentication.
*/
public void useApiKeySyncClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for getting async client using API key authentication.
*/
public void useApiKeyAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for rotating API key of the client
*/
public void rotatingApiKey() {
AzureKeyCredential credential = new AzureKeyCredential("{api_key}");
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.apiKey(credential)
.endpoint("{endpoint}")
.buildClient();
credential.update("{new_api_key}");
}
} | class ReadmeSamples {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for configuring http client.
*/
public void configureHttpClient() {
HttpClient client = new NettyAsyncHttpClientBuilder()
.port(8080)
.wiretap(true)
.build();
}
/**
* Code snippet for getting sync client using the API key authentication.
*/
public void useApiKeySyncClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for getting async client using API key authentication.
*/
public void useApiKeyAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for rotating API key of the client
*/
public void rotatingApiKey() {
AzureKeyCredential credential = new AzureKeyCredential("{api_key}");
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.apiKey(credential)
.endpoint("{endpoint}")
.buildClient();
credential.update("{new_api_key}");
}
} |
Should this check for `null`, empty, and less than eight elements? | private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox)) {
return null;
}
Point topLeft = new Point(serviceBoundingBox.get(0), serviceBoundingBox.get(1));
Point topRight = new Point(serviceBoundingBox.get(2), serviceBoundingBox.get(3));
Point bottomLeft = new Point(serviceBoundingBox.get(4), serviceBoundingBox.get(5));
Point bottomRight = new Point(serviceBoundingBox.get(6), serviceBoundingBox.get(7));
return new BoundingBox(Arrays.asList(topLeft, topRight, bottomLeft, bottomRight));
} | if (CoreUtils.isNullOrEmpty(serviceBoundingBox)) { | private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new BoundingBox(pointList);
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern COMPILE = Pattern.compile("[^0-9]+");
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
// Raw OCR text, labeled-document results, and per-page (unlabeled) results from the service.
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap = null;
// AtomicReference only so the loops below can reassign; no concurrency involved.
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult);
// documentResults branch: labeled results, one field map per document.
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (int i = 0; i < documentResults.size(); i++) {
DocumentResult documentResultItem = documentResults.get(i);
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults, includeTextDetails);
}
// NOTE(review): only the last documentResultItem's fields/formType/pageRange survive the
// loop, yet a single RecognizedForm is added for all document results - confirm intended.
// NOTE(review): pageRange.get() is null (NPE below) if no item carried a 2-element page
// range - confirm the service guarantees a [start, end] pair.
// NOTE(review): subList() is fed 1-based page numbers as list indices while the
// pageResults branch below uses pageNumber - 1 - looks off by one; verify.
extractedFormList.add(new RecognizedForm(extractedFieldMap, formType.get(), pageRange.get(),
new IterableStream<FormPage>(formPages.subList(pageRange.get().getStartPageNumber(), pageRange.get().getEndPageNumber()))));
}
// pageResults branch: one RecognizedForm per page; field map is rebuilt per page only
// when there were no document results.
if (pageResults != null) {
for (int i = 0; i < pageResults.size(); i++) {
PageResult pageResultItem = pageResults.get(i);
Integer pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getUnlabeledFieldMap(includeTextDetails, readResults, pageResultItem, pageNumber);
}
extractedFormList.add(
new RecognizedForm(extractedFieldMap, formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<FormPage>(Arrays.asList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
// Wraps every recognized form as a US-English receipt and exposes them as an IterableStream.
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    final List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
// One FormPage is produced per OCR read result.
for (int i = 0; i < readResults.size(); i++) {
ReadResult readResultItem = readResults.get(i);
PageResult pageResultItem;
List<FormTable> perPageTableList = new ArrayList<>();
// NOTE(review): pageResults.get(i) assumes pageResults is index-aligned with
// readResults and at least as long - confirm the service guarantees this.
if (!CoreUtils.isNullOrEmpty(pageResults)) {
pageResultItem = pageResults.get(i);
perPageTableList = getPageTables(pageResultItem, pageResultItem.getPage());
}
// Lines are optional on a read result; absent lines yield an empty list.
List<FormLine> perPageFormLineList = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}
return formPages;
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
// TreeMap keeps field names in deterministic (sorted) order.
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
IterableStream<FormContent> formContentList = null;
Integer pageNumber = fieldValue.getPage();
// Reference elements are resolved only when the caller asked for text details.
if (includeTextDetails && !(CoreUtils.isNullOrEmpty(fieldValue.getElements()))) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
// Labeled fields use the label itself as the field name; no bounding box is available here.
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
FieldText valueText = new FieldText(fieldValue.getText(), null, pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue, FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
FormField<?> value;
// Dispatch on the wire type and narrow to the matching FormField<T>.
switch (fieldValue.getType()) {
case PHONE_NUMBER:
value = new FormField<String>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
break;
case STRING:
value = new FormField<String>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueString(), valueText, pageNumber);
break;
// NOTE(review): TIME is surfaced as a String (no LocalTime conversion) - confirm intended.
case TIME:
value = new FormField<String>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueTime(), valueText, pageNumber);
break;
case DATE:
value = new FormField<LocalDate>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueDate(), valueText, pageNumber);
break;
case INTEGER:
value = new FormField<Integer>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueInteger(), valueText, pageNumber);
break;
case NUMBER:
value = new FormField<Number>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueNumber(), valueText, pageNumber);
break;
// Arrays carry no confidence/label/valueText of their own; elements convert recursively.
case ARRAY:
value = new FormField<List<FormField<?>>>(null, null, key,
toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
break;
// Objects become a map of member name to recursively converted FormField.
case OBJECT:
value = new FormField<Map<String, FormField<?>>>(fieldValue.getConfidence(), labelText,
key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText, pageNumber);
break;
default:
throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
}
return value;
}
/**
* Helper method to convert the service returned {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject, Integer pageNumber, List<ReadResult> readResults) {
// TreeMap keeps member names in deterministic (sorted) order.
Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) -> {
IterableStream<FormContent> formValueContentList = null;
// NOTE(review): unlike getLabeledFieldMap, elements are resolved unconditionally here
// (no includeTextDetails guard) - confirm this asymmetry is intended.
if (!CoreUtils.isNullOrEmpty(fieldValue.getElements())) {
formValueContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
// Each member becomes a FormField keyed by its member name; the value text carries the
// member's own bounding box and page.
fieldValueObjectMap.put(key, setFormField(null, key, fieldValue, new FieldText(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(), formValueContentList),
fieldValue.getPage(), readResults));
});
return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
return valueArray.stream().map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults)).collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList) {
return new FormPage(readResultItem.getHeight(), readResultItem.getAngle(), DimensionUnit.fromString(readResultItem.getUnit().toString()), readResultItem.getWidth(), perPageLineList,
perPageTableList
);
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, Integer pageNumber) {
List<FormTable> extractedTablesList = new ArrayList<>();
pageResultItem.getTables().forEach(dataTable -> {
List<FormTableCell> tableCellList = dataTable.getCells().stream()
.map(dataTableCell -> new FormTableCell(dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(), null,
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isHeader(),
pageNumber))
.collect(Collectors.toList());
FormTable extractedTable = new FormTable(dataTable.getRows(), dataTable.getColumns(), tableCellList);
extractedTablesList.add(extractedTable);
});
return extractedTablesList;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
List<FormLine> formLines = readResultItem.getLines().stream()
.map(textLine -> new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(), new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage()))))
.collect(Collectors.toList());
return formLines;
}
/**
* Helper method to set the {@link RecognizedForm
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
static Map<String, FormField<?>> getUnlabeledFieldMap(boolean includeTextDetails, List<ReadResult> readResults,
PageResult pageResultItem, Integer pageNumber) {
Map<String, FormField<?>> formFieldMap = new TreeMap<>();
List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
for (int i = 0; i < keyValuePairs.size(); i++) {
KeyValuePair keyValuePair = keyValuePairs.get(i);
IterableStream<FormContent> formKeyContentList = null;
IterableStream<FormContent> formValueContentList = null;
if (includeTextDetails && !CoreUtils.isNullOrEmpty(keyValuePair.getValue().getElements())) {
formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
}
String fieldName = "field-" + i;
FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(), toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
FieldText valueText = new FieldText(keyValuePair.getValue().getText(), toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
FormField<String> formField = new FormField<>(keyValuePair.getConfidence(), labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
formFieldMap.put(fieldName, formField);
}
return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements, List<ReadResult> readResults, Integer pageNumber) {
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
String[] indices = COMPILE.matcher(elementString).replaceAll(" ").trim().split(" ");
int readResultIndex, lineIndex;
if (indices.length >= 1) {
readResultIndex = Integer.parseInt(indices[0]);
lineIndex = Integer.parseInt(indices[1]);
} else {
throw LOGGER.logExceptionAsError(new RuntimeException("Reference Elements not found"));
}
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord = readResults.get(readResultIndex).getLines().get(lineIndex).getWords()
.get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber, textWord.getConfidence());
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()), pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
List<FormWord> extractedWordList = words.stream()
.map(textWord -> new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
textWord.getConfidence())).collect(Collectors.toList());
return new IterableStream<FormWord>(extractedWordList);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
return new IterableStream<>(
toRecognizedForm(analyzeResult, includeTextDetails).stream()
.map(recognizedForm ->
new RecognizedReceipt("en-US", recognizedForm))
.collect(Collectors.toList()));
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
int[] index = new int[]{0};
iterable.forEach(element -> biConsumer.accept(index[0]++, element));
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
return pageResultItem.getTables().stream()
.map(dataTable ->
new FormTable(dataTable.getRows(), dataTable.getColumns(),
new IterableStream<>(dataTable.getCells().stream()
.map(dataTableCell -> new FormTableCell(
dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(),
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
.collect(Collectors.toList()))))
.collect(Collectors.toList());
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
return readResultItem.getLines().stream()
.map(textLine -> new FormLine(
textLine.getText(),
toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(),
toWords(textLine.getWords(), readResultItem.getPage())))
.collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
Integer pageNumber = fieldValue.getPage();
IterableStream<FormContent> formContentList = null;
if (includeTextDetails) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
FormField<?> value;
switch (fieldValue.getType()) {
case PHONE_NUMBER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
break;
case STRING:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueString(), valueText, pageNumber);
break;
case TIME:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueTime(), valueText, pageNumber);
break;
case DATE:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueDate(), valueText, pageNumber);
break;
case INTEGER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueInteger(), valueText, pageNumber);
break;
case NUMBER:
value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueNumber(), valueText, pageNumber);
break;
case ARRAY:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
break;
case OBJECT:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
pageNumber);
break;
default:
throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
}
return value;
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
Integer pageNumber, List<ReadResult> readResults) {
Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) ->
fieldValueObjectMap.put(key, setFormField(null, key, fieldValue,
new FieldText(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()),
fieldValue.getPage(),
setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
), fieldValue.getPage(), readResults)));
return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
return valueArray.stream()
.map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults))
.collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList) {
return new FormPage(
readResultItem.getHeight(),
readResultItem.getAngle(),
DimensionUnit.fromString(readResultItem.getUnit().toString()),
readResultItem.getWidth(),
new IterableStream<FormLine>(perPageLineList),
new IterableStream<FormTable>(perPageTableList)
);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
List<ReadResult> readResults,
PageResult pageResultItem, Integer pageNumber) {
Map<String, FormField<?>> formFieldMap = new TreeMap<>();
List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
IterableStream<FormContent> formKeyContentList = null;
IterableStream<FormContent> formValueContentList = null;
if (includeTextDetails) {
formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
pageNumber);
}
FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
String fieldName = "field-" + index;
FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
formFieldMap.put(fieldName, formField);
}));
return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
List<ReadResult> readResults, Integer pageNumber) {
if (CoreUtils.isNullOrEmpty(elements)) {
return IterableStream.of(null);
}
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
if (indices.length < 2) {
throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
+ "for the field value."));
}
int readResultIndex = Integer.parseInt(indices[0]);
int lineIndex = Integer.parseInt(indices[1]);
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord =
readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()));
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
return new IterableStream<>(words.stream()
.map(textWord -> new FormWord(
textWord.getText(),
toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()))
).collect(Collectors.toList()));
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
} |
Since `BoundingBox` infers a box shape, four corners, should it accept the specific points as parameters in the constructor? Unless `BoundingBox` is meant to be more generic and should be renamed `BoundingArea`. Magic parameter ordering in lists is generally a confusing API design. | private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox)) {
return null;
}
Point topLeft = new Point(serviceBoundingBox.get(0), serviceBoundingBox.get(1));
Point topRight = new Point(serviceBoundingBox.get(2), serviceBoundingBox.get(3));
Point bottomLeft = new Point(serviceBoundingBox.get(4), serviceBoundingBox.get(5));
Point bottomRight = new Point(serviceBoundingBox.get(6), serviceBoundingBox.get(7));
return new BoundingBox(Arrays.asList(topLeft, topRight, bottomLeft, bottomRight));
} | return new BoundingBox(Arrays.asList(topLeft, topRight, bottomLeft, bottomRight)); | private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new BoundingBox(pointList);
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern COMPILE = Pattern.compile("[^0-9]+");
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap = null;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (int i = 0; i < documentResults.size(); i++) {
DocumentResult documentResultItem = documentResults.get(i);
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults, includeTextDetails);
}
extractedFormList.add(new RecognizedForm(extractedFieldMap, formType.get(), pageRange.get(),
new IterableStream<FormPage>(formPages.subList(pageRange.get().getStartPageNumber(), pageRange.get().getEndPageNumber()))));
}
if (pageResults != null) {
for (int i = 0; i < pageResults.size(); i++) {
PageResult pageResultItem = pageResults.get(i);
Integer pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getUnlabeledFieldMap(includeTextDetails, readResults, pageResultItem, pageNumber);
}
extractedFormList.add(
new RecognizedForm(extractedFieldMap, formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<FormPage>(Arrays.asList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<RecognizedReceipt> extractedReceiptList =
toRecognizedForm(analyzeResult, includeTextDetails).stream()
.map(recognizedForm ->
new RecognizedReceipt("en-US", recognizedForm)).collect(Collectors.toList());
return new IterableStream<>(extractedReceiptList);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
for (int i = 0; i < readResults.size(); i++) {
ReadResult readResultItem = readResults.get(i);
PageResult pageResultItem;
List<FormTable> perPageTableList = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(pageResults)) {
pageResultItem = pageResults.get(i);
perPageTableList = getPageTables(pageResultItem, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}
return formPages;
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
IterableStream<FormContent> formContentList = null;
Integer pageNumber = fieldValue.getPage();
if (includeTextDetails && !(CoreUtils.isNullOrEmpty(fieldValue.getElements()))) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
FieldText valueText = new FieldText(fieldValue.getText(), null, pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue, FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    // Dispatch on the service-reported value type and pull the matching typed accessor.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<String>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<String>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            // NOTE(review): TIME is surfaced as a raw String, not a java.time type — confirm intended.
            value = new FormField<String>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<LocalDate>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<Integer>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Array values carry no confidence/label/value text of their own; elements convert
            // recursively via toFormFieldArray.
            value = new FormField<List<FormField<?>>>(null, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            // Object members convert recursively into a nested, name-keyed field map.
            value = new FormField<Map<String, FormField<?>>>(fieldValue.getConfidence(), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText, pageNumber);
            break;
        default:
            // Fail loudly on unknown service types rather than guessing a representation.
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
* Helper method to convert the service returned {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue}.
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject, Integer pageNumber, List<ReadResult> readResults) {
    // Recursively convert each member of a service OBJECT value; result is sorted by member name.
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    valueObject.forEach((key, fieldValue) -> {
        IterableStream<FormContent> formValueContentList = null;
        // Unlike the top-level field map, element references are resolved here regardless of
        // includeTextDetails — NOTE(review): confirm that asymmetry is intentional.
        if (!CoreUtils.isNullOrEmpty(fieldValue.getElements())) {
            formValueContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        fieldValueObjectMap.put(key, setFormField(null, key, fieldValue, new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(), formValueContentList),
            fieldValue.getPage(), readResults));
    });
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue}.
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    // Convert each service value into its strongly typed SDK field. Array entries carry neither
    // a name nor label/value text — only the page they occur on.
    List<FormField<?>> fields = new ArrayList<>();
    for (FieldValue fieldValue : valueArray) {
        fields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return fields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // Translate the service's page-level read result into the SDK page model.
    DimensionUnit unit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(readResultItem.getHeight(), readResultItem.getAngle(), unit,
        readResultItem.getWidth(), perPageLineList, perPageTableList);
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, Integer pageNumber) {
    List<FormTable> extractedTablesList = new ArrayList<>();
    pageResultItem.getTables().forEach(dataTable -> {
        List<FormTableCell> tableCellList = dataTable.getCells().stream()
            .map(dataTableCell -> new FormTableCell(dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                dataTableCell.getConfidence(), null,
                // Service may omit the header/footer flags; absent means false.
                dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                // FIX: this argument previously evaluated isHeader(), mislabeling footer cells
                // whenever the header flag differed from the footer flag.
                dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                pageNumber))
            .collect(Collectors.toList());
        FormTable extractedTable = new FormTable(dataTable.getRows(), dataTable.getColumns(), tableCellList);
        extractedTablesList.add(extractedTable);
    });
    return extractedTablesList;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // One FormLine per service text line, each carrying its words for the same page.
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(), new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage()))));
    }
    return formLines;
}
/**
* Helper method to set the {@link RecognizedForm} fields from the page result returned by the service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm}.
*/
static Map<String, FormField<?>> getUnlabeledFieldMap(boolean includeTextDetails, List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    for (int i = 0; i < keyValuePairs.size(); i++) {
        KeyValuePair keyValuePair = keyValuePairs.get(i);
        IterableStream<FormContent> formKeyContentList = null;
        IterableStream<FormContent> formValueContentList = null;
        if (includeTextDetails) {
            // FIX: resolve key and value references independently. Previously BOTH were gated on
            // the value's element list, which dropped key references — and risked an NPE on a
            // null key element list — whenever the two sides differed.
            if (!CoreUtils.isNullOrEmpty(keyValuePair.getKey().getElements())) {
                formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            }
            if (!CoreUtils.isNullOrEmpty(keyValuePair.getValue().getElements())) {
                formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
            }
        }
        // Unlabeled results have no field names; synthesize stable "field-<index>" keys.
        String fieldName = "field-" + i;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        FormField<String> formField = new FormField<>(keyValuePair.getConfidence(), labelFieldText, fieldName,
            keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements, List<ReadResult> readResults, Integer pageNumber) {
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Strip non-digits from the reference string, leaving
        // [readResultIndex, lineIndex(, wordIndex)] — presumably from a JSON-pointer style
        // "#/readResults/i/lines/j(/words/k)" reference; TODO confirm against the service docs.
        String[] indices = COMPILE.matcher(elementString).replaceAll(" ").trim().split(" ");
        // FIX: require at least two indices. The previous check (length >= 1) let a single-index
        // reference fall through to indices[1] and throw ArrayIndexOutOfBoundsException instead
        // of the intended error below.
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Reference Elements not found"));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Word-level reference.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord = readResults.get(readResultIndex).getLines().get(lineIndex).getWords()
                .get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber, textWord.getConfidence());
            formContentList.add(wordElement);
        } else {
            // Line-level reference.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()), pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    // Wrap each service word in the SDK word model, all attributed to the same page.
    List<FormWord> formWords = new ArrayList<>();
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
            textWord.getConfidence()));
    }
    return new IterableStream<FormWord>(formWords);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Replaces every non-digit run when parsing element reference strings.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Used when the service omits a confidence score (see setDefaultConfidenceValue).
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Utility class; no instances.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    // NOTE(review): stays null when neither documentResults nor pageResults are present, so this
    // method can return null — confirm callers handle that (an empty list may be safer).
    List<RecognizedForm> extractedFormList = null;
    List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
    if (!CoreUtils.isNullOrEmpty(documentResults)) {
        // Document-level results: one recognized form per document, spanning a page range.
        extractedFormList = new ArrayList<>();
        for (DocumentResult documentResultItem : documentResults) {
            PageRange pageRange;
            List<Integer> documentPageRange = documentResultItem.getPageRange();
            if (documentPageRange.size() == 2) {
                pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
            } else {
                // Fall back to a single-page range on an unexpected service payload.
                pageRange = new PageRange(1, 1);
            }
            // NOTE(review): helper is named getUnlabeledFieldMap but document results appear to
            // come from labeled models per the sibling javadoc — confirm the naming.
            Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
                includeTextDetails);
            extractedFormList.add(new RecognizedForm(
                extractedFieldMap,
                documentResultItem.getDocType(),
                pageRange,
                // Page numbers are 1-based; subList's end index is exclusive, hence no -1 there.
                new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
                    pageRange.getEndPageNumber()))));
        }
    } else if (!CoreUtils.isNullOrEmpty(pageResults)) {
        // Page-level results: one recognized form per page, typed by cluster id when present.
        extractedFormList = new ArrayList<>();
        for (PageResult pageResultItem : pageResults) {
            StringBuffer formType = new StringBuffer("form-");
            int pageNumber = pageResultItem.getPage();
            Integer clusterId = pageResultItem.getClusterId();
            if (clusterId != null) {
                formType.append(clusterId);
            }
            Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
                pageResultItem, pageNumber);
            extractedFormList.add(new RecognizedForm(
                extractedFieldMap,
                formType.toString(),
                new PageRange(pageNumber, pageNumber),
                new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
        }
    }
    return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    // Receipts are recognized forms wrapped with a locale.
    // NOTE(review): locale is hard-coded to "en-US" — confirm receipts are always US English, or
    // plumb the locale through from the service response.
    return new IterableStream<>(
        toRecognizedForm(analyzeResult, includeTextDetails).stream()
            .map(recognizedForm ->
                new RecognizedReceipt("en-US", recognizedForm))
            .collect(Collectors.toList()));
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // Hoisted out of the lambda so the null check runs once.
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    // One FormPage per read result. pageResults is indexed in lockstep with readResults —
    // assumes the service returns them aligned; TODO confirm.
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        List<FormLine> perPageFormLineList = new ArrayList<>();
        // Lines are only materialized when the caller asked for text details.
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }));
    return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // A plain for-each can keep its own counter, so no single-element-array capture trick
    // is needed here.
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index++, element);
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    // One FormTable per service table; each cell eagerly resolves its element references.
    return pageResultItem.getTables().stream()
        .map(dataTable ->
            new FormTable(dataTable.getRows(), dataTable.getColumns(),
                new IterableStream<>(dataTable.getCells().stream()
                    .map(dataTableCell -> new FormTableCell(
                        dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                        dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                        dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                        dataTableCell.getConfidence(),
                        // Service may omit the header/footer flags; absent means false.
                        dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                        dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                        pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
                    .collect(Collectors.toList()))))
        .collect(Collectors.toList());
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // One FormLine per service text line, each carrying its words for the same page.
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            toWords(textLine.getWords(), readResultItem.getPage())));
    }
    return formLines;
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    // Sorted map keyed by the service-provided field name.
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        // Label is the field name itself; no bounding box is supplied for it here.
        FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
        Integer pageNumber = fieldValue.getPage();
        IterableStream<FormContent> formContentList = null;
        if (includeTextDetails) {
            // setReferenceElements handles a null/empty element list itself.
            formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    // Dispatch on the service-reported value type; a missing confidence is normalized to the
    // default via setDefaultConfidenceValue.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            // NOTE(review): TIME is surfaced as a raw String, not a java.time type — confirm intended.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Array values carry no label/value text; elements convert recursively.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            // Object members convert recursively into a nested, name-keyed field map.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            // Fail loudly on unknown service types rather than guessing a representation.
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // The service omits confidence for some fields; treat a missing value as the default.
    if (confidence != null) {
        return confidence;
    }
    return DEFAULT_CONFIDENCE_VALUE;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    // Recursively convert each member of a service OBJECT value; result is sorted by member name.
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    valueObject.forEach((key, fieldValue) ->
        fieldValueObjectMap.put(key, setFormField(null, key, fieldValue,
            new FieldText(fieldValue.getText(),
                toBoundingBox(fieldValue.getBoundingBox()),
                fieldValue.getPage(),
                // setReferenceElements handles a null/empty element list itself.
                setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
            ), fieldValue.getPage(), readResults)));
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    // Convert each service value into its strongly typed SDK field. Array entries carry neither
    // a name nor label/value text — only the page they occur on.
    List<FormField<?>> fields = new ArrayList<>();
    for (FieldValue fieldValue : valueArray) {
        fields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return fields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // Wrap the per-page lines/tables into the IterableStreams the FormPage model expects.
    IterableStream<FormLine> lines = new IterableStream<>(perPageLineList);
    IterableStream<FormTable> tables = new IterableStream<>(perPageTableList);
    return new FormPage(readResultItem.getHeight(), readResultItem.getAngle(),
        DimensionUnit.fromString(readResultItem.getUnit().toString()), readResultItem.getWidth(),
        lines, tables);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    // NOTE(review): despite the name, this consumes PageResult key/value pairs, which the
    // class javadocs attribute to unlabeled models — confirm the labeled/unlabeled naming.
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        IterableStream<FormContent> formKeyContentList = null;
        IterableStream<FormContent> formValueContentList = null;
        if (includeTextDetails) {
            // setReferenceElements handles null/empty element lists itself.
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        // No real field names at page level; synthesize stable "field-<index>" keys.
        String fieldName = "field-" + index;
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        // NOTE(review): IterableStream.of(null) wraps a null iterable — confirm callers never
        // iterate this directly (an empty stream may be safer).
        return IterableStream.of(null);
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Keep only the digits of the reference string, leaving
        // [readResultIndex, lineIndex(, wordIndex)].
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Word-level reference.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            // Line-level reference.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    // Wrap each service word in the SDK word model, all attributed to the same page; a missing
    // confidence is normalized to the default.
    List<FormWord> formWords = new ArrayList<>();
    for (TextWord textWord : words) {
        formWords.add(new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
} |
Could make this a little more performant by hard coding in `ModelStatus.INVALID`'s `toString` value since that is the only time we will reach this code path. ```java new IllegalArgumentException(String.format("Model Id %s returned with invalid status", modelInfo.getModelId()) ``` | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
IterableStream<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | modelInfo.getModelId(), modelInfo.getStatus()))); | static CustomFormModel toCustomFormModel(Model modelResponse) {
    ModelInfo modelInfo = modelResponse.getModelInfo();
    if (modelInfo.getStatus() == ModelStatus.INVALID) {
        throw LOGGER.logExceptionAsError(
            new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
                modelInfo.getModelId())));
    }
    // Training details only exist once a training operation has produced a result.
    List<TrainingDocumentInfo> trainingDocumentInfoList = null;
    // NOTE(review): declared as List while transformTrainingErrors elsewhere in this file
    // returns IterableStream<FormRecognizerError> — confirm which overload/version is intended.
    List<FormRecognizerError> modelErrors = null;
    if (modelResponse.getTrainResult() != null) {
        trainingDocumentInfoList =
            modelResponse.getTrainResult().getTrainingDocuments().stream()
                .map(trainingDocumentItem -> new TrainingDocumentInfo(
                    trainingDocumentItem.getDocumentName(),
                    TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
                    trainingDocumentItem.getPages(),
                    transformTrainingErrors(trainingDocumentItem.getErrors())))
                .collect(Collectors.toList());
        modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
    }
    List<CustomFormSubModel> subModelList = new ArrayList<>();
    String formType = "form-";
    if (modelResponse.getKeys() != null) {
        // Unlabeled training: fields are grouped into clusters with synthesized names.
        // NOTE(review): fieldMap is declared once and reused across clusters, so every
        // CustomFormSubModel shares one accumulating map — confirm whether each cluster should
        // get its own map.
        Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
        modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
            forEachWithIndex(clusterFields, (index, eachField) -> {
                String fieldName = "field-" + index;
                fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
            });
            subModelList.add(new CustomFormSubModel(
                null,
                fieldMap,
                formType + clusterKey));
        });
    } else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
        // Labeled training: fields arrive named, with per-field accuracy.
        Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
        modelResponse.getTrainResult().getFields()
            .forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
                new CustomFormModelField(null, formFieldsReport.getFieldName(),
                    formFieldsReport.getAccuracy())));
        subModelList.add(new CustomFormSubModel(
            modelResponse.getTrainResult().getAverageModelAccuracy(),
            fieldMap,
            formType + modelInfo.getModelId()));
    }
    return new CustomFormModel(
        modelInfo.getModelId().toString(),
        CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
        modelInfo.getCreatedDateTime(),
        modelInfo.getLastUpdatedDateTime(),
        new IterableStream<>(subModelList),
        modelErrors,
        trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
// Utility class; no instances.
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
 * Helper method to convert a service-side list of {@link ErrorInformation} into an
 * {@link IterableStream} of SDK-level {@link FormRecognizerError} values.
 *
 * @param trainingErrorList The list of {@link ErrorInformation} returned by the service; may be
 * {@code null} or empty.
 *
 * @return An {@link IterableStream} of {@link FormRecognizerError}; never {@code null} — an empty
 * stream is returned when the input is {@code null} or empty.
 */
private static IterableStream<FormRecognizerError> transformTrainingErrors(
    List<ErrorInformation> trainingErrorList) {
    if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
        // Normalize null/empty input to an empty stream so callers never see null.
        return new IterableStream<FormRecognizerError>(Collections.emptyList());
    } else {
        // Copy only the error code and message; other ErrorInformation fields are not surfaced.
        return new IterableStream<>(trainingErrorList.stream()
            .map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
                errorInformation.getMessage()))
            .collect(Collectors.toList()));
    }
}
/**
 * Transform a list of service-side {@link ModelInfo} into a list of SDK-level
 * {@link CustomFormModelInfo}.
 *
 * @param modelInfoList A list of {@link ModelInfo} returned by the service.
 *
 * @return A list of {@link CustomFormModelInfo}, one per input element, in the same order.
 */
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
    // A plain stream map is clearer than the CollectionTransformer boilerplate and matches
    // the stream style already used by transformTrainingErrors in this class.
    return modelInfoList.stream()
        .map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
            CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
            modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime()))
        .collect(Collectors.toList());
}
/**
 * A generic helper that transforms a collection element-by-element from type {@code E} to
 * type {@code F}.
 *
 * @param <E> The source element type.
 * @param <F> The target element type.
 */
abstract static class CollectionTransformer<E, F> {
    // Per-element conversion supplied by the concrete subclass.
    abstract F transform(E e);
    // Applies the per-element conversion to every element, preserving input order.
    List<F> transform(List<E> list) {
        List<F> newList = new ArrayList<>();
        for (E e : list) {
            newList.add(transform(e));
        }
        return newList;
    }
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
 * Transform a list of service-side {@link ModelInfo} into a list of SDK-level
 * {@link CustomFormModelInfo}.
 *
 * @param modelInfoList A list of {@link ModelInfo} returned by the service.
 *
 * @return A list of {@link CustomFormModelInfo}, one per input element, in the same order.
 */
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
    List<CustomFormModelInfo> customModelInfoList = new ArrayList<>();
    for (ModelInfo modelInfo : modelInfoList) {
        customModelInfoList.add(new CustomFormModelInfo(modelInfo.getModelId().toString(),
            CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
            modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime()));
    }
    return customModelInfoList;
}
/**
 * Helper method to convert a service-side list of {@link ErrorInformation} into a list of
 * SDK-level {@link FormRecognizerError} values.
 *
 * @param trainingErrorList The list of {@link ErrorInformation}; may be {@code null} or empty.
 *
 * @return A list of {@link FormRecognizerError}; an empty list when the input is {@code null}
 * or empty.
 */
private static List<FormRecognizerError> transformTrainingErrors(
    List<ErrorInformation> trainingErrorList) {
    // Guard clause: normalize null/empty input to an empty list.
    if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
        return Collections.emptyList();
    }
    List<FormRecognizerError> recognizerErrors = new ArrayList<>();
    for (ErrorInformation errorInformation : trainingErrorList) {
        // Only the error code and message are surfaced to the caller.
        recognizerErrors.add(
            new FormRecognizerError(errorInformation.getCode(), errorInformation.getMessage()));
    }
    return recognizerErrors;
}
} |
Same here - this can be created from the builder instead. | public FormTrainingClient getFormTrainingClient() {
return new FormTrainingClient(client.getFormTrainingAsyncClient());
} | } | public FormTrainingClient getFormTrainingClient() {
return new FormTrainingClient(client.getFormTrainingAsyncClient());
} | class FormRecognizerClient {
private final FormRecognizerAsyncClient client;
/**
 * Create a {@link FormRecognizerClient client} that sends requests to the Form Recognizer
 * service's endpoint. Each service call goes through the async client's pipeline, which is
 * configured via the {@link FormRecognizerClientBuilder}.
 *
 * @param client The {@link FormRecognizerAsyncClient} that this synchronous client routes its
 * requests through.
 */
FormRecognizerClient(FormRecognizerAsyncClient client) {
    this.client = client;
}
/**
* Creates a new {@link FormTrainingClient} object.The new {@code FormRecognizerClient} uses the same request policy
* pipeline as the {@code FormRecognizerClient}.
*
* @return A new {@link FormTrainingClient} object.
*/
/**
 * Recognizes and extracts form data from a document at the given URL, using optical character
 * recognition (OCR) and a custom trained model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 * <p>NOTE(review): named {@code beginExtract...} while sibling methods use
 * {@code beginRecognize...} — confirm whether a rename is planned; kept as-is for
 * compatibility.</p>
 *
 * @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 * @param modelId The custom trained model Id to be used.
 *
 * @return A {@link SyncPoller} to poll the progress of the recognize custom form operation until
 * it has completed, has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
    beginExtractCustomFormsFromUrl(String fileSourceUrl, String modelId) {
    // Delegates with includeTextDetails=false and the default poll interval.
    return beginExtractCustomFormsFromUrl(fileSourceUrl, modelId, false, null);
}
/**
 * Recognizes and extracts form data from a document at the given URL, using optical character
 * recognition (OCR) and a custom trained model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 * @param modelId The custom trained model Id to be used.
 * @param includeTextDetails Include text lines and element references in the result.
 * @param pollInterval Duration between each poll for the operation status. If none is specified,
 * a default of 5 seconds is used.
 *
 * @return A {@link SyncPoller} to poll the progress of the recognize custom form operation until
 * it has completed, has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
    beginExtractCustomFormsFromUrl(String fileSourceUrl, String modelId, boolean includeTextDetails,
    Duration pollInterval) {
    // Blocks on the async client's poller to provide the synchronous API surface.
    return client.beginRecognizeCustomFormsFromUrl(fileSourceUrl, modelId, includeTextDetails, pollInterval)
        .getSyncPoller();
}
/**
 * Recognizes and extracts form data from the provided document data using optical character
 * recognition (OCR) and a custom trained model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param data The data of the document to extract form information from.
 * @param modelId The custom trained model Id to be used.
 * @param length The exact length of the data. Size of the file must be less than 20 MB.
 * @param formContentType Supported media types including .pdf, .jpg, .png or .tiff type file stream.
 *
 * @return A {@link SyncPoller} that polls the recognize custom form operation until it has
 * completed, has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
    beginRecognizeCustomForms(Flux<ByteBuffer> data, String modelId, long length, FormContentType formContentType) {
    // Delegates with includeTextDetails=false and the default poll interval.
    return beginRecognizeCustomForms(data, modelId, length, formContentType, false, null);
}
/**
 * Recognizes and extracts form data from the provided document data using optical character
 * recognition (OCR) and a custom trained model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param data The data of the document to extract form information from.
 * @param modelId The custom trained model Id to be used.
 * @param length The exact length of the data. Size of the file must be less than 20 MB.
 * @param formContentType Supported media types including .pdf, .jpg, .png or .tiff type file stream.
 * @param includeTextDetails Include text lines and element references in the result.
 * @param pollInterval Duration between each poll for the operation status. If none is specified,
 * a default of 5 seconds is used.
 *
 * @return A {@link SyncPoller} that polls the recognize custom form operation until it has
 * completed, has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
    beginRecognizeCustomForms(Flux<ByteBuffer> data, String modelId, long length, FormContentType formContentType,
    boolean includeTextDetails, Duration pollInterval) {
    // Blocks on the async client's poller to provide the synchronous API surface.
    return client.beginRecognizeCustomForms(data, modelId, length, formContentType,
        includeTextDetails, pollInterval).getSyncPoller();
}
/**
 * Recognizes and extracts layout data (content) from a document at the given URL, using optical
 * character recognition (OCR).
 * <p>NOTE(review): the original Javadoc claimed a "custom trained model" is used here; content
 * extraction does not take a model id — confirm against the service documentation.</p>
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 *
 * @return A {@link SyncPoller} that polls the recognize layout operation until it has completed,
 * has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<FormPage>> beginRecognizeContentFromUrl(String fileSourceUrl) {
    // Delegates with the default poll interval.
    return beginRecognizeContentFromUrl(fileSourceUrl, null);
}
/**
 * Recognizes and extracts layout data (content) from a document at the given URL, using optical
 * character recognition (OCR).
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 * @param pollInterval Duration between each poll for the operation status. If none is specified,
 * a default of 5 seconds is used.
 *
 * @return A {@link SyncPoller} that polls the recognize layout operation until it has completed,
 * has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<FormPage>>
    beginRecognizeContentFromUrl(String sourceUrl, Duration pollInterval) {
    // Blocks on the async client's poller to provide the synchronous API surface.
    return client.beginRecognizeContentFromUrl(sourceUrl, pollInterval).getSyncPoller();
}
/**
 * Recognizes and extracts layout data (content) from the provided document data using optical
 * character recognition (OCR).
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param data The data of the document to extract layout information from.
 * @param length The exact length of the data. Size of the file must be less than 20 MB.
 * @param formContentType Supported media types including .pdf, .jpg, .png or .tiff type file stream.
 *
 * @return A {@link SyncPoller} that polls the recognize layout operation until it has completed,
 * has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<FormPage>>
    beginRecognizeContent(InputStream data, long length, FormContentType formContentType) {
    // Delegates with the default poll interval.
    return beginRecognizeContent(data, length, formContentType, null);
}
/**
 * Recognizes and extracts layout data (content) from the provided document data using optical
 * character recognition (OCR).
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param data The data of the document to extract layout information from.
 * @param length The exact length of the data. Size of the file must be less than 20 MB.
 * @param formContentType Supported media types including .pdf, .jpg, .png or .tiff type file stream.
 * @param pollInterval Duration between each poll for the operation status. If none is specified,
 * a default of 5 seconds is used.
 *
 * @return A {@link SyncPoller} that polls the recognize layout operation until it has completed,
 * has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<FormPage>>
    beginRecognizeContent(InputStream data, long length, FormContentType formContentType,
    Duration pollInterval) {
    // Adapt the blocking InputStream into the reactive Flux the async client expects.
    Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
    // NOTE(review): the async overload takes (formContentType, length) in the opposite order to
    // the receipts overload — confirm the parameter order against the async client's signature.
    return client.beginRecognizeContent(buffer, formContentType, length, pollInterval)
        .getSyncPoller();
}
/**
 * Recognizes and extracts receipt data from a document at the given URL, using optical character
 * recognition (OCR) and a prebuilt trained receipt model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 *
 * @return A {@link SyncPoller} to poll the progress of the recognize receipt operation until it
 * has completed, has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
    beginRecognizeReceiptsFromUrl(String sourceUrl) {
    // Delegates with includeTextDetails=false and the default poll interval.
    return beginRecognizeReceiptsFromUrl(sourceUrl, false, null);
}
/**
 * Recognizes and extracts receipt data from a document at the given URL, using optical character
 * recognition (OCR) and a prebuilt trained receipt model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 * @param includeTextDetails Include text lines and element references in the result.
 * @param pollInterval Duration between each poll for the operation status. If none is specified,
 * a default of 5 seconds is used.
 *
 * @return A {@link SyncPoller} to poll the progress of the recognize receipt operation until it
 * has completed, has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
    beginRecognizeReceiptsFromUrl(String sourceUrl, boolean includeTextDetails, Duration pollInterval) {
    // Blocks on the async client's poller to provide the synchronous API surface.
    return client.beginRecognizeReceiptsFromUrl(sourceUrl, includeTextDetails, pollInterval).getSyncPoller();
}
/**
 * Recognizes and extracts receipt data from the provided document data using optical character
 * recognition (OCR) and a prebuilt trained receipt model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param data The data of the document to extract receipt information from.
 * @param length The exact length of the data. Size of the file must be less than 20 MB.
 * @param formContentType Supported media types including .pdf, .jpg, .png or .tiff type file stream.
 *
 * @return A {@link SyncPoller} that polls the recognize receipt operation until it has completed,
 * has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
    beginRecognizeReceipts(InputStream data, long length, FormContentType formContentType) {
    // Delegates with includeTextDetails=false and the default poll interval.
    return beginRecognizeReceipts(data, length, formContentType, false, null);
}
/**
 * Recognizes and extracts receipt data from the provided document data using optical character
 * recognition (OCR) and a prebuilt trained receipt model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param data The data of the document to extract receipt information from.
 * @param length The exact length of the data. Size of the file must be less than 20 MB.
 * @param formContentType Supported media types including .pdf, .jpg, .png or .tiff type file stream.
 * @param includeTextDetails Include text lines and element references in the result.
 * @param pollInterval Duration between each poll for the operation status. If none is specified,
 * a default of 5 seconds is used.
 *
 * @return A {@link SyncPoller} that polls the recognize receipt operation until it has completed,
 * has failed, or has been cancelled.
 */
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
    beginRecognizeReceipts(InputStream data, long length, FormContentType formContentType,
    boolean includeTextDetails, Duration pollInterval) {
    // Adapt the blocking InputStream into the reactive Flux the async client expects.
    Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
    return client.beginRecognizeReceipts(buffer, length, formContentType, includeTextDetails, pollInterval)
        .getSyncPoller();
}
} | class FormRecognizerClient {
private final FormRecognizerAsyncClient client;
/**
* Create a {@link FormRecognizerClient client} that sends requests to the Form Recognizer service's endpoint.
* Each service call goes through the {@link FormRecognizerClientBuilder
*
* @param client The {@link FormRecognizerClient} that the client routes its request through.
*/
FormRecognizerClient(FormRecognizerAsyncClient client) {
this.client = client;
}
/**
* Creates a new {@link FormTrainingClient} object.The new {@code FormRecognizerClient} uses the same request policy
* pipeline as the {@code FormRecognizerClient}.
*
* @return A new {@link FormTrainingClient} object.
*/
/**
 * Recognizes and extracts form data from a document at the given URL, using optical character
 * recognition (OCR) and a custom trained model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 * @param modelId The UUID string format custom trained model Id to be used.
 *
 * @return A {@link SyncPoller} to poll the progress of the recognize custom form operation until
 * it has completed, has failed, or has been cancelled.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
    beginRecognizeCustomFormsFromUrl(String fileSourceUrl, String modelId) {
    // Delegates with includeTextDetails=false and the default poll interval.
    return beginRecognizeCustomFormsFromUrl(fileSourceUrl, modelId, false, null);
}
/**
* Recognizes and extracts receipt data from documents using optical character recognition (OCR)
* and a custom trained model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param modelId The UUID string format custom trained model Id to be used.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} to poll the progress of the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomFormsFromUrl(String fileSourceUrl, String modelId, boolean includeTextDetails,
Duration pollInterval) {
return client.beginRecognizeCustomFormsFromUrl(fileSourceUrl, modelId, includeTextDetails, pollInterval)
.getSyncPoller();
}
/**
* Recognizes and extracts form data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param modelId The UUID string format custom trained model Id to be used.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomForms(InputStream data, String modelId, long length, FormContentType formContentType) {
return beginRecognizeCustomForms(data, modelId, length, formContentType, false, null);
}
/**
* Recognizes and extracts form data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param modelId The UUID string format custom trained model Id to be used.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomForms(InputStream data, String modelId, long length, FormContentType formContentType,
boolean includeTextDetails, Duration pollInterval) {
Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
return client.beginRecognizeCustomForms(buffer, modelId, length, formContentType,
includeTextDetails, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts layout data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
*
* @return A {@link SyncPoller} that polls the extract layout form operation until it has completed, has failed,
* or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<FormPage>> beginRecognizeContentFromUrl(String fileSourceUrl) {
return beginRecognizeContentFromUrl(fileSourceUrl, null);
}
/**
* Recognizes and extracts layout data using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed, has
* failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContentFromUrl(String sourceUrl, Duration pollInterval) {
return client.beginRecognizeContentFromUrl(sourceUrl, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts layout data using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed, has failed, or has
* been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContent(InputStream data, long length, FormContentType formContentType) {
return beginRecognizeContent(data, length, formContentType, null);
}
/**
* Recognizes and extracts layout data from the provided document data using optical character recognition (OCR)
* and a prebuilt trained receipt model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContent(InputStream data, long length, FormContentType formContentType,
Duration pollInterval) {
Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
return client.beginRecognizeContent(buffer, formContentType, length, pollInterval)
.getSyncPoller();
}
/**
 * Recognizes and extracts receipt data from a document at the given URL, using optical character
 * recognition (OCR) and a prebuilt trained receipt model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 *
 * @return A {@link SyncPoller} to poll the progress of the recognize receipt operation until it
 * has completed, has failed, or has been cancelled.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
    beginRecognizeReceiptsFromUrl(String sourceUrl) {
    // Delegates with includeTextDetails=false and the default poll interval.
    return beginRecognizeReceiptsFromUrl(sourceUrl, false, null);
}
/**
 * Recognizes and extracts receipt data from a document at the given URL, using optical character
 * recognition (OCR) and a prebuilt trained receipt model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
 * @param includeTextDetails Include text lines and element references in the result.
 * @param pollInterval Duration between each poll for the operation status. If none is specified,
 * a default of 5 seconds is used.
 *
 * @return A {@link SyncPoller} to poll the progress of the recognize receipt operation until it
 * has completed, has failed, or has been cancelled.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
    beginRecognizeReceiptsFromUrl(String sourceUrl, boolean includeTextDetails, Duration pollInterval) {
    // Blocks on the async client's poller to provide the synchronous API surface.
    return client.beginRecognizeReceiptsFromUrl(sourceUrl, includeTextDetails, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts data from the provided document data using optical character recognition (OCR)
* and a prebuilt trained receipt model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract receipt operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceipts(InputStream data, long length, FormContentType formContentType) {
return beginRecognizeReceipts(data, length, formContentType, false, null);
}
/**
 * Recognizes and extracts receipt data from the provided document data using optical character
 * recognition (OCR) and a prebuilt trained receipt model.
 * <p>The service does not support cancellation of the long running operation and returns with an
 * error message indicating absence of cancellation support.</p>
 *
 * @param data The data of the document to extract receipt information from.
 * @param length The exact length of the data. Size of the file must be less than 20 MB.
 * @param formContentType Supported media types including .pdf, .jpg, .png or .tiff type file stream.
 * @param includeTextDetails Include text lines and element references in the result.
 * @param pollInterval Duration between each poll for the operation status. If none is specified,
 * a default of 5 seconds is used.
 *
 * @return A {@link SyncPoller} that polls the recognize receipt operation until it has completed,
 * has failed, or has been cancelled.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
    beginRecognizeReceipts(InputStream data, long length, FormContentType formContentType,
    boolean includeTextDetails, Duration pollInterval) {
    // Adapt the blocking InputStream into the reactive Flux the async client expects.
    Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
    return client.beginRecognizeReceipts(buffer, length, formContentType, includeTextDetails, pollInterval)
        .getSyncPoller();
}
} |
Should move the instantiation of `USReceiptItem` outside of the inner for loop, this will construct it when the entire `entrySet` has been processed and will churn a lot of short lived objects. | private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
List<USReceiptItem> receiptItemList = new ArrayList<>();
FormField<String> name = null;
FormField<Float> quantity = null;
FormField<Float> price = null;
FormField<Float> totalPrice = null;
USReceiptItem receiptItem = null;
for (FormField<?> eachFieldValue : fieldValueArray) {
Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
String key = entry.getKey();
if (QUANTITY.toString().equals(key)) {
quantity = (FormField<Float>) entry.getValue();
} else if (NAME.toString().equals(key)) {
name = (FormField<String>) entry.getValue();
} else if (PRICE.toString().equals(key)) {
price = (FormField<Float>) entry.getValue();
} else if (TOTAL_PRICE.toString().equals(key)) {
totalPrice = (FormField<Float>) entry.getValue();
}
receiptItem = new USReceiptItem(name, quantity, price, totalPrice);
}
receiptItemList.add(receiptItem);
}
return receiptItemList;
} | receiptItem = new USReceiptItem(name, quantity, price, totalPrice); | private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
List<USReceiptItem> receiptItemList = null;
for (FormField<?> eachFieldValue : fieldValueArray) {
receiptItemList = new ArrayList<>();
Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
FormField<String> name = null;
FormField<Float> quantity = null;
FormField<Float> price = null;
FormField<Float> totalPrice = null;
for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
String key = entry.getKey();
if (QUANTITY.toString().equals(key)) {
quantity = (FormField<Float>) entry.getValue();
} else if (NAME.toString().equals(key)) {
name = (FormField<String>) entry.getValue();
} else if (PRICE.toString().equals(key)) {
price = (FormField<Float>) entry.getValue();
} else if (TOTAL_PRICE.toString().equals(key)) {
totalPrice = (FormField<Float>) entry.getValue();
}
}
receiptItemList.add(new USReceiptItem(name, quantity, price, totalPrice));
}
return receiptItemList;
} | class ReceiptExtensions {
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
// Accumulators for every well-known US receipt field; any field the service
// did not return stays null in the resulting USReceipt.
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
// Dispatch each recognized field by its well-known key. The unchecked casts
// below assume the service returns the documented value type for each key —
// TODO confirm against the service contract.
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
// NOTE(review): the type is constructed from the map key ("ReceiptType"),
// not from the field's value string — confirm this is intended.
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
// Line items need structural conversion rather than a plain cast.
receiptItems = toReceiptItems(fieldValue);
break;
default:
// Unknown keys are silently ignored.
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
}
/**
* Helper method to convert the service level
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to SDK level {@link USReceiptItem receipt items}.
*
* @param fieldValueItems The strongly typed field values.
*
* @return A list of {@link USReceiptItem}.
*/
} | class ReceiptExtensions {
private ReceiptExtensions() {
}
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
@SuppressWarnings("unchecked")
public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
}
/**
* Helper method to convert the service level
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to SDK level {@link USReceiptItem receipt items}.
*
* @param fieldValueItems The strongly typed field values.
*
* @return A list of {@link USReceiptItem}.
*/
@SuppressWarnings("unchecked")
} |
It may be more performant to use `Collections.singletonList` here; note that this creates an unmodifiable list, so any later attempt to add elements to it will throw an exception. | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap = null;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (int i = 0; i < documentResults.size(); i++) {
DocumentResult documentResultItem = documentResults.get(i);
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults, includeTextDetails);
}
extractedFormList.add(new RecognizedForm(extractedFieldMap, formType.get(), pageRange.get(),
new IterableStream<FormPage>(formPages.subList(pageRange.get().getStartPageNumber(), pageRange.get().getEndPageNumber()))));
}
if (pageResults != null) {
for (int i = 0; i < pageResults.size(); i++) {
PageResult pageResultItem = pageResults.get(i);
Integer pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getUnlabeledFieldMap(includeTextDetails, readResults, pageResultItem, pageNumber);
}
extractedFormList.add(
new RecognizedForm(extractedFieldMap, formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<FormPage>(Arrays.asList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | new IterableStream<FormPage>(Arrays.asList(formPages.get(pageNumber - 1))))); | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern COMPILE = Pattern.compile("[^0-9]+");
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    // Every recognized form becomes one US-locale receipt.
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
// One FormPage is produced per ReadResult entry.
// NOTE(review): when pageResults is non-empty it is indexed with the same i as
// readResults, i.e. the two lists are assumed index-aligned — confirm with the
// service contract.
for (int i = 0; i < readResults.size(); i++) {
ReadResult readResultItem = readResults.get(i);
PageResult pageResultItem;
List<FormTable> perPageTableList = new ArrayList<>();
// Tables only exist when the service returned page-level results.
if (!CoreUtils.isNullOrEmpty(pageResults)) {
pageResultItem = pageResults.get(i);
perPageTableList = getPageTables(pageResultItem, pageResultItem.getPage());
}
// Text lines are optional per page; default to an empty list.
List<FormLine> perPageFormLineList = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}
return formPages;
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
// TreeMap keeps the field keys sorted alphabetically.
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
IterableStream<FormContent> formContentList = null;
Integer pageNumber = fieldValue.getPage();
// Reference elements are only resolved when the caller asked for text
// details and the service actually returned element references.
if (includeTextDetails && !(CoreUtils.isNullOrEmpty(fieldValue.getElements()))) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
// For labeled models the label text is the field key itself; no bounding
// box is available for either label or value here.
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
FieldText valueText = new FieldText(fieldValue.getText(), null, pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue, FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
FormField<?> value;
// Pick the strongly typed FormField variant matching the service value type.
switch (fieldValue.getType()) {
case PHONE_NUMBER:
value = new FormField<String>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
break;
case STRING:
value = new FormField<String>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueString(), valueText, pageNumber);
break;
case TIME:
// Time values are surfaced as their string representation.
value = new FormField<String>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueTime(), valueText, pageNumber);
break;
case DATE:
value = new FormField<LocalDate>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueDate(), valueText, pageNumber);
break;
case INTEGER:
value = new FormField<Integer>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueInteger(), valueText, pageNumber);
break;
case NUMBER:
value = new FormField<Number>(fieldValue.getConfidence(), labelText,
key, fieldValue.getValueNumber(), valueText, pageNumber);
break;
case ARRAY:
// NOTE(review): ARRAY fields are built with null confidence and null
// label text, unlike every other case — confirm this is intended.
value = new FormField<List<FormField<?>>>(null, null, key,
toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
break;
case OBJECT:
// Nested objects recurse through toFormFieldObject.
value = new FormField<Map<String, FormField<?>>>(fieldValue.getConfidence(), labelText,
key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText, pageNumber);
break;
default:
throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
}
return value;
}
/**
* Helper method to convert the service returned {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject, Integer pageNumber, List<ReadResult> readResults) {
// TreeMap keeps nested object keys sorted alphabetically.
Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) -> {
IterableStream<FormContent> formValueContentList = null;
// Resolve reference elements when the service returned any for this value.
if (!CoreUtils.isNullOrEmpty(fieldValue.getElements())) {
formValueContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
// Nested fields get no label text; the value text carries the bounding box
// and any resolved reference elements. Note the nested field's own page
// (fieldValue.getPage()) is used for the FormField, not the parent's.
fieldValueObjectMap.put(key, setFormField(null, key, fieldValue, new FieldText(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(), formValueContentList),
fieldValue.getPage(), readResults));
});
return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    // Each array element becomes a FormField with no label, key, or value text.
    List<FormField<?>> formFields = new ArrayList<>();
    for (FieldValue arrayElement : valueArray) {
        formFields.add(setFormField(null, null, arrayElement, null, arrayElement.getPage(), readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // Page geometry comes straight from the read result; tables and lines were
    // assembled by the caller.
    DimensionUnit pageUnit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(readResultItem.getHeight(), readResultItem.getAngle(), pageUnit,
        readResultItem.getWidth(), perPageLineList, perPageTableList);
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, Integer pageNumber) {
    List<FormTable> extractedTablesList = new ArrayList<>();
    pageResultItem.getTables().forEach(dataTable -> {
        List<FormTableCell> tableCellList = dataTable.getCells().stream()
            .map(dataTableCell -> new FormTableCell(dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                dataTableCell.getConfidence(), null,
                // A null header/footer flag from the service is treated as false.
                dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                // FIX: previously read isHeader() here (copy-paste), so the
                // footer flag was never honored.
                dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                pageNumber))
            .collect(Collectors.toList());
        FormTable extractedTable = new FormTable(dataTable.getRows(), dataTable.getColumns(), tableCellList);
        extractedTablesList.add(extractedTable);
    });
    return extractedTablesList;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // Convert every service text line on this page into an SDK FormLine.
    List<FormLine> lines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        lines.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage()))));
    }
    return lines;
}
/**
* Helper method to set the {@link RecognizedForm
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
static Map<String, FormField<?>> getUnlabeledFieldMap(boolean includeTextDetails, List<ReadResult> readResults,
PageResult pageResultItem, Integer pageNumber) {
// TreeMap keeps the synthetic "field-N" keys sorted.
Map<String, FormField<?>> formFieldMap = new TreeMap<>();
List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
for (int i = 0; i < keyValuePairs.size(); i++) {
KeyValuePair keyValuePair = keyValuePairs.get(i);
IterableStream<FormContent> formKeyContentList = null;
IterableStream<FormContent> formValueContentList = null;
// NOTE(review): only the VALUE's elements are null-checked here, yet the
// KEY's elements are also dereferenced inside the branch — confirm the
// service always populates key elements whenever value elements exist.
if (includeTextDetails && !CoreUtils.isNullOrEmpty(keyValuePair.getValue().getElements())) {
formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
}
// Unlabeled models have no stable field names, so a positional name is used.
String fieldName = "field-" + i;
FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(), toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
FieldText valueText = new FieldText(keyValuePair.getValue().getText(), toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
FormField<String> formField = new FormField<>(keyValuePair.getConfidence(), labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
formFieldMap.put(fieldName, formField);
}
return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements, List<ReadResult> readResults, Integer pageNumber) {
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Strip every non-digit run from the reference string, leaving the numeric
        // components: readResult index, line index, and (optionally) word index.
        String[] indices = COMPILE.matcher(elementString).replaceAll(" ").trim().split(" ");
        int readResultIndex, lineIndex;
        // FIX: both a read-result index AND a line index are consumed below, so at
        // least two components are required. The previous ">= 1" guard let a
        // single-component reference reach indices[1] and throw
        // ArrayIndexOutOfBoundsException instead of the intended error.
        if (indices.length >= 2) {
            readResultIndex = Integer.parseInt(indices[0]);
            lineIndex = Integer.parseInt(indices[1]);
        } else {
            throw LOGGER.logExceptionAsError(new RuntimeException("Reference Elements not found"));
        }
        if (indices.length == 3) {
            // Word-level reference: third component selects the word within the line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord = readResults.get(readResultIndex).getLines().get(lineIndex).getWords()
                .get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber, textWord.getConfidence());
            formContentList.add(wordElement);
        } else {
            // Line-level reference.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()), pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    // Map each service word onto an SDK FormWord for the given page.
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
            textWord.getConfidence()));
    }
    return new IterableStream<FormWord>(formWords);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // The service encodes a box as eight numbers: four (x, y) corner points in
    // the order top-left, top-right, bottom-left, bottom-right.
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox)) {
        return null;
    }
    List<Point> cornerPoints = new ArrayList<>();
    for (int i = 0; i <= 6; i += 2) {
        cornerPoints.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(cornerPoints);
}
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
return new IterableStream<>(
toRecognizedForm(analyzeResult, includeTextDetails).stream()
.map(recognizedForm ->
new RecognizedReceipt("en-US", recognizedForm))
.collect(Collectors.toList()));
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // Plain counter loop instead of the single-element-array capture trick.
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index, element);
        index++;
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
return pageResultItem.getTables().stream()
.map(dataTable ->
new FormTable(dataTable.getRows(), dataTable.getColumns(),
new IterableStream<>(dataTable.getCells().stream()
.map(dataTableCell -> new FormTableCell(
dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(),
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
.collect(Collectors.toList()))))
.collect(Collectors.toList());
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
return readResultItem.getLines().stream()
.map(textLine -> new FormLine(
textLine.getText(),
toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(),
toWords(textLine.getWords(), readResultItem.getPage())))
.collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
Integer pageNumber = fieldValue.getPage();
IterableStream<FormContent> formContentList = null;
if (includeTextDetails) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
FormField<?> value;
switch (fieldValue.getType()) {
case PHONE_NUMBER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
break;
case STRING:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueString(), valueText, pageNumber);
break;
case TIME:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueTime(), valueText, pageNumber);
break;
case DATE:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueDate(), valueText, pageNumber);
break;
case INTEGER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueInteger(), valueText, pageNumber);
break;
case NUMBER:
value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueNumber(), valueText, pageNumber);
break;
case ARRAY:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
break;
case OBJECT:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
pageNumber);
break;
default:
throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
}
return value;
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // The service may omit confidence for some fields; substitute the default.
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
 * Converts the service returned map of
 * {@link com.azure.ai.formrecognizer.implementation.models.FieldValue}
 * to an SDK level map of {@link FormField}, keyed by the original field name.
 *
 * @param valueObject The map of field values returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The Map of {@link FormField}.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> fieldEntry : valueObject.entrySet()) {
        String key = fieldEntry.getKey();
        FieldValue fieldValue = fieldEntry.getValue();
        // Value text carries the raw text, bounding box and (when requested) reference elements.
        FieldText valueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()),
            fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, valueText, fieldValue.getPage(), readResults));
    }
    return fieldValueObjectMap;
}
/**
 * Converts the service returned list of
 * {@link com.azure.ai.formrecognizer.implementation.models.FieldValue}
 * to an SDK level List of {@link FormField}.
 *
 * @param valueArray The array of field values returned by the service.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The List of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> formFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
 * Builds the {@code FormPage} for a single page from its text extraction result,
 * tables and lines.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param perPageTableList The per page tables list.
 * @param perPageLineList The per page form lines.
 *
 * @return The per page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    IterableStream<FormLine> lines = new IterableStream<>(perPageLineList);
    IterableStream<FormTable> tables = new IterableStream<>(perPageTableList);
    // The service reports the unit as a string; map it onto the SDK enum.
    DimensionUnit unit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(readResultItem.getHeight(), readResultItem.getAngle(), unit,
        readResultItem.getWidth(), lines, tables);
}
/**
 * Builds the field map for a page from the page level key/value pairs returned by the
 * service. Fields are synthesized names of the form {@code "field-<index>"}.
 *
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated for the recognized form.
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> recognizedFieldMap = new TreeMap<>();
    forEachWithIndex(pageResultItem.getKeyValuePairs(), ((index, keyValuePair) -> {
        // Reference elements are only resolved when the caller asked for text details.
        IterableStream<FormContent> keyElements = null;
        IterableStream<FormContent> valueElements = null;
        if (includeTextDetails) {
            keyElements = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            valueElements = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        String fieldName = "field-" + index;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyElements);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueElements);
        recognizedFieldMap.put(fieldName,
            new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
                labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber));
    }));
    return recognizedFieldMap;
}
/**
 * Resolves the text reference elements on FieldValue/fields when {@code includeTextDetails}
 * is set to true. Each element string encodes numeric indices into the read results
 * (read result index, line index, and optionally word index); the non-digit characters
 * are stripped to recover those indices.
 *
 * @return The list of referenced elements.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return IterableStream.of(null);
    }
    List<FormContent> referencedContent = new ArrayList<>();
    for (String elementString : elements) {
        // Reduce the reference string to its numeric components, e.g. "0 1 2".
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices reference a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            referencedContent.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber, setDefaultConfidenceValue(textWord.getConfidence())));
        } else {
            // Two indices reference a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            referencedContent.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber)));
        }
    }
    return new IterableStream<>(referencedContent);
}
/**
 * Converts the service level {@link TextWord} list to the SDK level model {@link FormWord} list.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The list of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
 * Converts the service level modeled list of numbers (pairs of x/y coordinates) to the
 * SDK level {@link BoundingBox}.
 *
 * @param serviceBoundingBox A list of numbers representing the points of a box, as
 * consecutive (x, y) pairs.
 *
 * @return A {@link BoundingBox}, or {@code null} when the list is empty or has an odd length.
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    // Consume the flat coordinate list two values at a time: (x, y) per point.
    for (int i = 0; i + 1 < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(pointList);
}
} |
As per naming conventions, acronyms should follow camel-case convention but in this case, I am not sure naming this as `asUsReceipt` is good. | public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
} | FormField<String> merchantName = null; | public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
} | class ReceiptExtensions {
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
/**
* Helper method to convert the service level
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to SDK level {@link USReceiptItem receipt items}.
*
* @param fieldValueItems The strongly typed field values.
*
* @return A list of {@link USReceiptItem}.
*/
private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
List<USReceiptItem> receiptItemList = new ArrayList<>();
FormField<String> name = null;
FormField<Float> quantity = null;
FormField<Float> price = null;
FormField<Float> totalPrice = null;
USReceiptItem receiptItem = null;
for (FormField<?> eachFieldValue : fieldValueArray) {
Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
String key = entry.getKey();
if (QUANTITY.toString().equals(key)) {
quantity = (FormField<Float>) entry.getValue();
} else if (NAME.toString().equals(key)) {
name = (FormField<String>) entry.getValue();
} else if (PRICE.toString().equals(key)) {
price = (FormField<Float>) entry.getValue();
} else if (TOTAL_PRICE.toString().equals(key)) {
totalPrice = (FormField<Float>) entry.getValue();
}
receiptItem = new USReceiptItem(name, quantity, price, totalPrice);
}
receiptItemList.add(receiptItem);
}
return receiptItemList;
}
} | class ReceiptExtensions {
private ReceiptExtensions() {
}
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
@SuppressWarnings("unchecked")
/**
* Helper method to convert the service level
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to SDK level {@link USReceiptItem receipt items}.
*
* @param fieldValueItems The strongly typed field values.
*
* @return A list of {@link USReceiptItem}.
*/
@SuppressWarnings("unchecked")
private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
List<USReceiptItem> receiptItemList = null;
for (FormField<?> eachFieldValue : fieldValueArray) {
receiptItemList = new ArrayList<>();
Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
FormField<String> name = null;
FormField<Float> quantity = null;
FormField<Float> price = null;
FormField<Float> totalPrice = null;
for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
String key = entry.getKey();
if (QUANTITY.toString().equals(key)) {
quantity = (FormField<Float>) entry.getValue();
} else if (NAME.toString().equals(key)) {
name = (FormField<String>) entry.getValue();
} else if (PRICE.toString().equals(key)) {
price = (FormField<Float>) entry.getValue();
} else if (TOTAL_PRICE.toString().equals(key)) {
totalPrice = (FormField<Float>) entry.getValue();
}
}
receiptItemList.add(new USReceiptItem(name, quantity, price, totalPrice));
}
return receiptItemList;
}
} |
Uncomment this. | public static void main(final String[] args) {
FormTrainingClient client = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("https:
.buildClient().getFormTrainingClient();
String modelId = "{model-Id}";
CustomFormModel customModel = client.getCustomModel(modelId);
System.out.printf("Model Id: %s%n", customModel.getModelId());
System.out.printf("Model Status: %s%n", customModel.getModelStatus());
customModel.getSubModels().forEach(customFormSubModel -> {
System.out.printf("Custom Model Form type: %s%n", customFormSubModel.getFormType());
System.out.printf("Custom Model Accuracy: %s%n", customFormSubModel.getAccuracy());
if (customFormSubModel.getFieldMap() != null) {
customFormSubModel.getFieldMap().forEach((fieldText, customFormModelField) -> {
System.out.printf("Field Text: %s%n", fieldText);
System.out.printf("Field Accuracy: %s%n", customFormModelField.getAccuracy());
});
}
System.out.println("Model Training Info:");
customModel.getTrainingDocuments().forEach(trainingDocumentInfo -> {
System.out.printf("Training document Name: %s%n", trainingDocumentInfo.getName());
System.out.printf("Training document Status: %s%n", trainingDocumentInfo.getTrainingStatus());
});
});
AccountProperties accountProperties = client.getAccountProperties();
System.out.println("Account Properties");
System.out.printf("Model count in subscription : %s%n", modelId, accountProperties.getCount());
System.out.printf("Model limit in subsciption: %s%n", accountProperties.getLimit());
System.out.printf("Deleted model with model Id: %s operation completed with status: %s%n", modelId,
client.deleteModelWithResponse(modelId, Context.NONE).getStatusCode());
} | public static void main(final String[] args) {
FormTrainingClient client = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("https:
.buildClient().getFormTrainingClient();
String modelId = "{model-Id}";
CustomFormModel customModel = client.getCustomModel(modelId);
System.out.printf("Model Id: %s%n", customModel.getModelId());
System.out.printf("Model Status: %s%n", customModel.getModelStatus());
customModel.getSubModels().forEach(customFormSubModel -> {
System.out.printf("Custom Model Form type: %s%n", customFormSubModel.getFormType());
System.out.printf("Custom Model Accuracy: %s%n", customFormSubModel.getAccuracy());
if (customFormSubModel.getFieldMap() != null) {
customFormSubModel.getFieldMap().forEach((fieldText, customFormModelField) -> {
System.out.printf("Field Text: %s%n", fieldText);
System.out.printf("Field Accuracy: %s%n", customFormModelField.getAccuracy());
});
}
System.out.println("Model Training Info:");
customModel.getTrainingDocuments().forEach(trainingDocumentInfo -> {
System.out.printf("Training document Name: %s%n", trainingDocumentInfo.getName());
System.out.printf("Training document Status: %s%n", trainingDocumentInfo.getTrainingStatus());
});
});
AccountProperties accountProperties = client.getAccountProperties();
System.out.println("Account Properties");
System.out.printf("Model count in subscription : %s%n", modelId, accountProperties.getCount());
System.out.printf("Model limit in subsciption: %s%n", accountProperties.getLimit());
System.out.printf("Deleted model with model Id: %s operation completed with status: %s%n", modelId,
client.deleteModelWithResponse(modelId, Context.NONE).getStatusCode());
} | class CustomModelOperations {
/**
* Main program to invoke the demo for performing operations of a custom model.
*
* @param args Unused. Arguments to the program.
*/
} | class CustomModelOperations {
/**
* Main program to invoke the demo for performing operations of a custom model.
*
* @param args Unused. Arguments to the program.
*/
} | |
Dependent on this open PR - #9975. | public static void main(final String[] args) {
FormTrainingClient client = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("https:
.buildClient().getFormTrainingClient();
String modelId = "{model-Id}";
CustomFormModel customModel = client.getCustomModel(modelId);
System.out.printf("Model Id: %s%n", customModel.getModelId());
System.out.printf("Model Status: %s%n", customModel.getModelStatus());
customModel.getSubModels().forEach(customFormSubModel -> {
System.out.printf("Custom Model Form type: %s%n", customFormSubModel.getFormType());
System.out.printf("Custom Model Accuracy: %s%n", customFormSubModel.getAccuracy());
if (customFormSubModel.getFieldMap() != null) {
customFormSubModel.getFieldMap().forEach((fieldText, customFormModelField) -> {
System.out.printf("Field Text: %s%n", fieldText);
System.out.printf("Field Accuracy: %s%n", customFormModelField.getAccuracy());
});
}
System.out.println("Model Training Info:");
customModel.getTrainingDocuments().forEach(trainingDocumentInfo -> {
System.out.printf("Training document Name: %s%n", trainingDocumentInfo.getName());
System.out.printf("Training document Status: %s%n", trainingDocumentInfo.getTrainingStatus());
});
});
AccountProperties accountProperties = client.getAccountProperties();
System.out.println("Account Properties");
System.out.printf("Model count in subscription : %s%n", modelId, accountProperties.getCount());
System.out.printf("Model limit in subsciption: %s%n", accountProperties.getLimit());
System.out.printf("Deleted model with model Id: %s operation completed with status: %s%n", modelId,
client.deleteModelWithResponse(modelId, Context.NONE).getStatusCode());
} | public static void main(final String[] args) {
FormTrainingClient client = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("https:
.buildClient().getFormTrainingClient();
String modelId = "{model-Id}";
CustomFormModel customModel = client.getCustomModel(modelId);
System.out.printf("Model Id: %s%n", customModel.getModelId());
System.out.printf("Model Status: %s%n", customModel.getModelStatus());
customModel.getSubModels().forEach(customFormSubModel -> {
System.out.printf("Custom Model Form type: %s%n", customFormSubModel.getFormType());
System.out.printf("Custom Model Accuracy: %s%n", customFormSubModel.getAccuracy());
if (customFormSubModel.getFieldMap() != null) {
customFormSubModel.getFieldMap().forEach((fieldText, customFormModelField) -> {
System.out.printf("Field Text: %s%n", fieldText);
System.out.printf("Field Accuracy: %s%n", customFormModelField.getAccuracy());
});
}
System.out.println("Model Training Info:");
customModel.getTrainingDocuments().forEach(trainingDocumentInfo -> {
System.out.printf("Training document Name: %s%n", trainingDocumentInfo.getName());
System.out.printf("Training document Status: %s%n", trainingDocumentInfo.getTrainingStatus());
});
});
AccountProperties accountProperties = client.getAccountProperties();
System.out.println("Account Properties");
System.out.printf("Model count in subscription : %s%n", modelId, accountProperties.getCount());
System.out.printf("Model limit in subsciption: %s%n", accountProperties.getLimit());
System.out.printf("Deleted model with model Id: %s operation completed with status: %s%n", modelId,
client.deleteModelWithResponse(modelId, Context.NONE).getStatusCode());
} | class CustomModelOperations {
/**
* Main program to invoke the demo for performing operations of a custom model.
*
* @param args Unused. Arguments to the program.
*/
} | class CustomModelOperations {
/**
* Main program to invoke the demo for performing operations of a custom model.
*
* @param args Unused. Arguments to the program.
*/
} | |
updated. | private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
List<USReceiptItem> receiptItemList = new ArrayList<>();
FormField<String> name = null;
FormField<Float> quantity = null;
FormField<Float> price = null;
FormField<Float> totalPrice = null;
USReceiptItem receiptItem = null;
for (FormField<?> eachFieldValue : fieldValueArray) {
Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
String key = entry.getKey();
if (QUANTITY.toString().equals(key)) {
quantity = (FormField<Float>) entry.getValue();
} else if (NAME.toString().equals(key)) {
name = (FormField<String>) entry.getValue();
} else if (PRICE.toString().equals(key)) {
price = (FormField<Float>) entry.getValue();
} else if (TOTAL_PRICE.toString().equals(key)) {
totalPrice = (FormField<Float>) entry.getValue();
}
receiptItem = new USReceiptItem(name, quantity, price, totalPrice);
}
receiptItemList.add(receiptItem);
}
return receiptItemList;
} | receiptItem = new USReceiptItem(name, quantity, price, totalPrice); | private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
List<USReceiptItem> receiptItemList = null;
for (FormField<?> eachFieldValue : fieldValueArray) {
receiptItemList = new ArrayList<>();
Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
FormField<String> name = null;
FormField<Float> quantity = null;
FormField<Float> price = null;
FormField<Float> totalPrice = null;
for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
String key = entry.getKey();
if (QUANTITY.toString().equals(key)) {
quantity = (FormField<Float>) entry.getValue();
} else if (NAME.toString().equals(key)) {
name = (FormField<String>) entry.getValue();
} else if (PRICE.toString().equals(key)) {
price = (FormField<Float>) entry.getValue();
} else if (TOTAL_PRICE.toString().equals(key)) {
totalPrice = (FormField<Float>) entry.getValue();
}
}
receiptItemList.add(new USReceiptItem(name, quantity, price, totalPrice));
}
return receiptItemList;
} | class ReceiptExtensions {
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
}
/**
* Helper method to convert the service level
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to SDK level {@link USReceiptItem receipt items}.
*
* @param fieldValueItems The strongly typed field values.
*
* @return A list of {@link USReceiptItem}.
*/
} | class ReceiptExtensions {
private ReceiptExtensions() {
}
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
@SuppressWarnings("unchecked")
public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
}
/**
* Helper method to convert the service level
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to SDK level {@link USReceiptItem receipt items}.
*
* @param fieldValueItems The strongly typed field values.
*
* @return A list of {@link USReceiptItem}.
*/
@SuppressWarnings("unchecked")
} |
It was originally designed to accept four specific points as constructor parameters, but based on service feedback that it may grow in the future to accept more than just the 8-point extent, this was updated to accept a list. | private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox)) {
return null;
}
Point topLeft = new Point(serviceBoundingBox.get(0), serviceBoundingBox.get(1));
Point topRight = new Point(serviceBoundingBox.get(2), serviceBoundingBox.get(3));
Point bottomLeft = new Point(serviceBoundingBox.get(4), serviceBoundingBox.get(5));
Point bottomRight = new Point(serviceBoundingBox.get(6), serviceBoundingBox.get(7));
return new BoundingBox(Arrays.asList(topLeft, topRight, bottomLeft, bottomRight));
} | return new BoundingBox(Arrays.asList(topLeft, topRight, bottomLeft, bottomRight)); | private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new BoundingBox(pointList);
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern COMPILE = Pattern.compile("[^0-9]+");
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap = null;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (int i = 0; i < documentResults.size(); i++) {
DocumentResult documentResultItem = documentResults.get(i);
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults, includeTextDetails);
}
extractedFormList.add(new RecognizedForm(extractedFieldMap, formType.get(), pageRange.get(),
new IterableStream<FormPage>(formPages.subList(pageRange.get().getStartPageNumber(), pageRange.get().getEndPageNumber()))));
}
if (pageResults != null) {
for (int i = 0; i < pageResults.size(); i++) {
PageResult pageResultItem = pageResults.get(i);
Integer pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getUnlabeledFieldMap(includeTextDetails, readResults, pageResultItem, pageNumber);
}
extractedFormList.add(
new RecognizedForm(extractedFieldMap, formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<FormPage>(Arrays.asList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
}
/**
 * Maps the service-returned {@link AnalyzeResult} of a receipt analysis to the SDK model
 * {@link RecognizedReceipt}.
 *
 * @param analyzeResult The service returned result for analyze receipts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The IterableStream of {@code RecognizedReceipt}.
 */
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    // Wrap every recognized form in a receipt; "en-US" is the locale the service reports for receipts.
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
 * Maps the service-returned {@link AnalyzeResult} of a layout analysis to a list of SDK model
 * {@link FormPage} items, one per read result.
 *
 * @param analyzeResult The service returned result for analyze layouts.
 *
 * @return The list of {@code FormPage}.
 */
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    // Page results are optional; hoist the emptiness check out of the loop.
    boolean hasPageResults = !CoreUtils.isNullOrEmpty(pageResults);
    List<FormPage> formPages = new ArrayList<>();
    for (int pageIndex = 0; pageIndex < readResults.size(); pageIndex++) {
        ReadResult readResultItem = readResults.get(pageIndex);
        // NOTE(review): pageResults is assumed to be index-aligned with readResults — confirm
        // against the service contract.
        List<FormTable> pageTables = new ArrayList<>();
        if (hasPageResults) {
            PageResult pageResultItem = pageResults.get(pageIndex);
            pageTables = getPageTables(pageResultItem, pageResultItem.getPage());
        }
        List<FormLine> pageLines = new ArrayList<>();
        if (!CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            pageLines = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, pageTables, pageLines));
    }
    return formPages;
}
/**
 * The field map returned on analyze with a labeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The map of field name to the strongly typed {@code FormField}.
 */
private static Map<String, FormField<?>> getLabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        IterableStream<FormContent> formContentList = null;
        Integer pageNumber = fieldValue.getPage();
        if (includeTextDetails && !(CoreUtils.isNullOrEmpty(fieldValue.getElements()))) {
            formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        // The label is only the field name; the service supplies no bounding box for it.
        FieldText labelText = new FieldText(key, null, pageNumber, null);
        // FIX: propagate the service-provided bounding box onto the value text, matching
        // how toFormFieldObject builds its FieldText; previously the box was dropped (null).
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
 * Converts the incoming service field value to one of the strongly typed SDK level
 * {@link FormField} instances, based on the service-reported value type. Reference
 * elements are resolved recursively for ARRAY and OBJECT values via
 * {@code toFormFieldArray}/{@code toFormFieldObject}.
 *
 * @param labelText The label text of the field; may be null for nested/array values.
 * @param key The name of the field; may be null for array elements.
 * @param fieldValue The named field value returned by the service.
 * @param valueText The value text of the field; may be null for nested/array values.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 * @throws RuntimeException if the service returns a field value type not handled here.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue, FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<String>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<String>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            // Time values are carried as their raw string representation from the service.
            value = new FormField<String>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<LocalDate>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<Integer>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(fieldValue.getConfidence(), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // NOTE(review): ARRAY fields are created with null confidence and null label/value
            // text — confirm this is intentional rather than an omission.
            value = new FormField<List<FormField<?>>>(null, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            value = new FormField<Map<String, FormField<?>>>(fieldValue.getConfidence(), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText, pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
 * Converts the service-returned {@code FieldValue} map (an OBJECT-typed value) to an SDK
 * level map of {@link FormField}, keyed by the nested field name.
 *
 * @param valueObject The map of nested field values returned by the service.
 * @param pageNumber The 1-based page number the parent field is on.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The Map of {@link FormField}.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject, Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> nestedFieldMap = new TreeMap<>();
    valueObject.forEach((fieldName, nestedValue) -> {
        // Resolve text reference elements for the nested value when present.
        IterableStream<FormContent> referenceElements = null;
        if (!CoreUtils.isNullOrEmpty(nestedValue.getElements())) {
            referenceElements = setReferenceElements(nestedValue.getElements(), readResults, pageNumber);
        }
        FieldText nestedValueText = new FieldText(nestedValue.getText(),
            toBoundingBox(nestedValue.getBoundingBox()), nestedValue.getPage(), referenceElements);
        nestedFieldMap.put(fieldName,
            setFormField(null, fieldName, nestedValue, nestedValueText, nestedValue.getPage(), readResults));
    });
    return nestedFieldMap;
}
/**
 * Converts the service-returned list of {@code FieldValue} items (an ARRAY-typed value)
 * to an SDK level list of {@link FormField}.
 *
 * @param valueArray The array of field values returned by the service.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The List of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> fieldList = new ArrayList<>(valueArray.size());
    // Array elements carry no label or value text of their own.
    for (FieldValue item : valueArray) {
        fieldList.add(setFormField(null, null, item, null, item.getPage(), readResults));
    }
    return fieldList;
}
/**
 * Builds the per page {@code FormPage} from a read result and the already-converted
 * per page tables and lines.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param perPageTableList The per page tables list.
 * @param perPageLineList The per page form lines.
 *
 * @return The per page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // The service unit string maps one-to-one onto the SDK DimensionUnit values.
    DimensionUnit dimensionUnit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(readResultItem.getHeight(), readResultItem.getAngle(), dimensionUnit,
        readResultItem.getWidth(), perPageLineList, perPageTableList);
}
/**
 * Gets the per-page table information from a page result.
 *
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The list of per page {@code FormTable}.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, Integer pageNumber) {
    List<FormTable> extractedTablesList = new ArrayList<>();
    pageResultItem.getTables().forEach(dataTable -> {
        List<FormTableCell> tableCellList = dataTable.getCells().stream()
            .map(dataTableCell -> new FormTableCell(dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                dataTableCell.getConfidence(), null,
                dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                // BUG FIX: the footer flag was previously populated from isHeader()
                // (copy-paste error); it must come from isFooter().
                dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                pageNumber))
            .collect(Collectors.toList());
        FormTable extractedTable = new FormTable(dataTable.getRows(), dataTable.getColumns(), tableCellList);
        extractedTablesList.add(extractedTable);
    });
    return extractedTablesList;
}
/**
 * Converts the per page {@link ReadResult} item's text lines to {@link FormLine} models.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    Integer resultPage = readResultItem.getPage();
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
            resultPage, new IterableStream<>(toWords(textLine.getWords(), resultPage))));
    }
    return formLines;
}
/**
 * Builds the {@code RecognizedForm} field map for an unlabeled (clustered) model from the
 * per-page key/value pairs. Fields are synthesized with "field-{index}" names.
 *
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the {@code RecognizedForm}.
 */
static Map<String, FormField<?>> getUnlabeledFieldMap(boolean includeTextDetails, List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    for (int i = 0; i < keyValuePairs.size(); i++) {
        KeyValuePair keyValuePair = keyValuePairs.get(i);
        IterableStream<FormContent> formKeyContentList = null;
        IterableStream<FormContent> formValueContentList = null;
        if (includeTextDetails) {
            // BUG FIX: guard the key and value element lists independently. The old code
            // checked only the VALUE's elements and then dereferenced the KEY's elements,
            // risking an NPE when the key has no reference elements while the value does.
            if (!CoreUtils.isNullOrEmpty(keyValuePair.getKey().getElements())) {
                formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults,
                    pageNumber);
            }
            if (!CoreUtils.isNullOrEmpty(keyValuePair.getValue().getElements())) {
                formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                    pageNumber);
            }
        }
        String fieldName = "field-" + i;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        FormField<String> formField = new FormField<>(keyValuePair.getConfidence(), labelFieldText, fieldName,
            keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }
    return formFieldMap;
}
/**
 * Resolves the text reference elements on FieldValue/fields when {@code includeTextDetails}
 * is set to true. A two-index reference resolves to a {@link FormLine}, a three-index
 * reference resolves to a {@link FormWord}.
 *
 * @param elements The element reference strings returned by the service.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number the elements are on.
 *
 * @return The list of referenced elements.
 * @throws RuntimeException if a reference string does not contain the required indices.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements, List<ReadResult> readResults, Integer pageNumber) {
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Strip everything but the digits; presumably references are JSON-pointer style
        // paths "#/readResults/{r}/lines/{l}[/words/{w}]" — TODO confirm against payload.
        String[] indices = COMPILE.matcher(elementString).replaceAll(" ").trim().split(" ");
        // BUG FIX: both a read-result index and a line index are mandatory. The old check
        // (indices.length >= 1) let a single-index reference fall through to indices[1]
        // and throw ArrayIndexOutOfBoundsException instead of a clear error.
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Reference Elements not found"));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Word-level reference.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord = readResults.get(readResultIndex).getLines().get(lineIndex).getWords()
                .get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber, textWord.getConfidence());
            formContentList.add(wordElement);
        } else {
            // Line-level reference.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
 * Converts the service level {@link TextWord} list to the SDK level model {@link FormWord}.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The list of {@code FormWord words}.
 */
static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> wordList = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        wordList.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
            pageNumber, textWord.getConfidence()));
    }
    return new IterableStream<FormWord>(wordList);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
return new IterableStream<>(
toRecognizedForm(analyzeResult, includeTextDetails).stream()
.map(recognizedForm ->
new RecognizedReceipt("en-US", recognizedForm))
.collect(Collectors.toList()));
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
}
/**
 * Iterates the given iterable, passing each element together with its zero-based
 * position to the supplied consumer.
 *
 * @param iterable the iterable to walk.
 * @param biConsumer the function which accepts the index and each value of the iterable.
 * @param <T> the type of items being iterated.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    int position = 0;
    for (T element : iterable) {
        biConsumer.accept(position++, element);
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
return pageResultItem.getTables().stream()
.map(dataTable ->
new FormTable(dataTable.getRows(), dataTable.getColumns(),
new IterableStream<>(dataTable.getCells().stream()
.map(dataTableCell -> new FormTableCell(
dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(),
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
.collect(Collectors.toList()))))
.collect(Collectors.toList());
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
return readResultItem.getLines().stream()
.map(textLine -> new FormLine(
textLine.getText(),
toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(),
toWords(textLine.getWords(), readResultItem.getPage())))
.collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
Integer pageNumber = fieldValue.getPage();
IterableStream<FormContent> formContentList = null;
if (includeTextDetails) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
FormField<?> value;
switch (fieldValue.getType()) {
case PHONE_NUMBER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
break;
case STRING:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueString(), valueText, pageNumber);
break;
case TIME:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueTime(), valueText, pageNumber);
break;
case DATE:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueDate(), valueText, pageNumber);
break;
case INTEGER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueInteger(), valueText, pageNumber);
break;
case NUMBER:
value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueNumber(), valueText, pageNumber);
break;
case ARRAY:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
break;
case OBJECT:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
pageNumber);
break;
default:
throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
}
return value;
}
/**
 * Substitutes the default confidence when the service returns a null confidence.
 *
 * @param confidence the confidence returned by service; may be null.
 *
 * @return the field confidence value, defaulting to {@code DEFAULT_CONFIDENCE_VALUE}.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
Integer pageNumber, List<ReadResult> readResults) {
Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) ->
fieldValueObjectMap.put(key, setFormField(null, key, fieldValue,
new FieldText(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()),
fieldValue.getPage(),
setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
), fieldValue.getPage(), readResults)));
return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
return valueArray.stream()
.map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults))
.collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList) {
return new FormPage(
readResultItem.getHeight(),
readResultItem.getAngle(),
DimensionUnit.fromString(readResultItem.getUnit().toString()),
readResultItem.getWidth(),
new IterableStream<FormLine>(perPageLineList),
new IterableStream<FormTable>(perPageTableList)
);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
List<ReadResult> readResults,
PageResult pageResultItem, Integer pageNumber) {
Map<String, FormField<?>> formFieldMap = new TreeMap<>();
List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
IterableStream<FormContent> formKeyContentList = null;
IterableStream<FormContent> formValueContentList = null;
if (includeTextDetails) {
formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
pageNumber);
}
FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
String fieldName = "field-" + index;
FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
formFieldMap.put(fieldName, formField);
}));
return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
List<ReadResult> readResults, Integer pageNumber) {
if (CoreUtils.isNullOrEmpty(elements)) {
return IterableStream.of(null);
}
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
if (indices.length < 2) {
throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
+ "for the field value."));
}
int readResultIndex = Integer.parseInt(indices[0]);
int lineIndex = Integer.parseInt(indices[1]);
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord =
readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()));
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
 * Converts the service level {@link TextWord} list to the SDK level model {@link FormWord},
 * defaulting any null confidence reported by the service.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The list of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
} |
The respective fetch operation checks for the status on the returned model and raises exception on the model.errors. | public void extractReceipt() {
String receiptSourceUrl = "https:
SyncPoller<OperationResult, IterableStream<RecognizedReceipt>> syncPoller =
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptSourceUrl);
IterableStream<RecognizedReceipt> receiptPageResults = syncPoller.getFinalResult();
receiptPageResults.forEach(recognizedReceipt -> {
USReceipt usReceipt = ReceiptExtensions.asUSReceipt(recognizedReceipt);
System.out.printf("Page Number: %s%n", usReceipt.getMerchantName().getPageNumber());
System.out.printf("Merchant Name %s%n", usReceipt.getMerchantName().getName());
System.out.printf("Merchant Name Value: %s%n", usReceipt.getMerchantName().getFieldValue());
System.out.printf("Merchant Address %s%n", usReceipt.getMerchantAddress().getName());
System.out.printf("Merchant Address Value: %s%n", usReceipt.getMerchantAddress().getFieldValue());
System.out.printf("Merchant Phone Number %s%n", usReceipt.getMerchantPhoneNumber().getName());
System.out.printf("Merchant Phone Number Value: %s%n", usReceipt.getMerchantPhoneNumber().getFieldValue());
System.out.printf("Total: %s%n", usReceipt.getTotal().getName());
System.out.printf("Total Value: %s%n", usReceipt.getTotal().getFieldValue());
});
} | IterableStream<RecognizedReceipt> receiptPageResults = syncPoller.getFinalResult(); | public void extractReceipt() {
String receiptSourceUrl = "https:
SyncPoller<OperationResult, IterableStream<RecognizedReceipt>> syncPoller =
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptSourceUrl);
IterableStream<RecognizedReceipt> receiptPageResults = syncPoller.getFinalResult();
receiptPageResults.forEach(recognizedReceipt -> {
USReceipt usReceipt = ReceiptExtensions.asUSReceipt(recognizedReceipt);
System.out.printf("Page Number: %s%n", usReceipt.getMerchantName().getPageNumber());
System.out.printf("Merchant Name %s%n", usReceipt.getMerchantName().getName());
System.out.printf("Merchant Name Value: %s%n", usReceipt.getMerchantName().getFieldValue());
System.out.printf("Merchant Address %s%n", usReceipt.getMerchantAddress().getName());
System.out.printf("Merchant Address Value: %s%n", usReceipt.getMerchantAddress().getFieldValue());
System.out.printf("Merchant Phone Number %s%n", usReceipt.getMerchantPhoneNumber().getName());
System.out.printf("Merchant Phone Number Value: %s%n", usReceipt.getMerchantPhoneNumber().getFieldValue());
System.out.printf("Total: %s%n", usReceipt.getTotal().getName());
System.out.printf("Total Value: %s%n", usReceipt.getTotal().getFieldValue());
});
} | class ReadmeSamples {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for configuring http client.
*/
public void configureHttpClient() {
HttpClient client = new NettyAsyncHttpClientBuilder()
.port(8080)
.wiretap(true)
.build();
}
/**
* Code snippet for getting sync client using the API key authentication.
*/
public void useApiKeySyncClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for getting async client using API key authentication.
*/
public void useApiKeyAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for rotating API key of the client
*/
public void rotatingApiKey() {
AzureKeyCredential credential = new AzureKeyCredential("{api_key}");
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.apiKey(credential)
.endpoint("{endpoint}")
.buildClient();
credential.update("{new_api_key}");
}
} | class ReadmeSamples {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for configuring http client.
*/
public void configureHttpClient() {
HttpClient client = new NettyAsyncHttpClientBuilder()
.port(8080)
.wiretap(true)
.build();
}
/**
* Code snippet for getting sync client using the API key authentication.
*/
public void useApiKeySyncClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for getting async client using API key authentication.
*/
public void useApiKeyAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.apiKey(new AzureKeyCredential("{api_key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for rotating API key of the client
*/
public void rotatingApiKey() {
AzureKeyCredential credential = new AzureKeyCredential("{api_key}");
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.apiKey(credential)
.endpoint("{endpoint}")
.buildClient();
credential.update("{new_api_key}");
}
} |
same as https://github.com/Azure/azure-sdk-for-java/pull/9988#discussion_r407790136 | public FormTrainingClient getFormTrainingClient() {
return new FormTrainingClient(client.getFormTrainingAsyncClient());
} | } | public FormTrainingClient getFormTrainingClient() {
return new FormTrainingClient(client.getFormTrainingAsyncClient());
} | class FormRecognizerClient {
private final FormRecognizerAsyncClient client;
/**
* Create a {@link FormRecognizerClient client} that sends requests to the Form Recognizer service's endpoint.
* Each service call goes through the {@link FormRecognizerClientBuilder
*
* @param client The {@link FormRecognizerClient} that the client routes its request through.
*/
FormRecognizerClient(FormRecognizerAsyncClient client) {
this.client = client;
}
/**
* Creates a new {@link FormTrainingClient} object.The new {@code FormRecognizerClient} uses the same request policy
* pipeline as the {@code FormRecognizerClient}.
*
* @return A new {@link FormTrainingClient} object.
*/
/**
* Recognizes and extracts receipt data from documents using optical character recognition (OCR)
* and a custom trained model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param modelId The custom trained model Id to be used.
*
* @return A {@link SyncPoller} to poll the progress of the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginExtractCustomFormsFromUrl(String fileSourceUrl, String modelId) {
return beginExtractCustomFormsFromUrl(fileSourceUrl, modelId, false, null);
}
/**
* Recognizes and extracts receipt data from documents using optical character recognition (OCR)
* and a custom trained model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param modelId The custom trained model Id to be used.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} to poll the progress of the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginExtractCustomFormsFromUrl(String fileSourceUrl, String modelId, boolean includeTextDetails,
Duration pollInterval) {
return client.beginRecognizeCustomFormsFromUrl(fileSourceUrl, modelId, includeTextDetails, pollInterval)
.getSyncPoller();
}
/**
* Recognizes and extracts form data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param modelId The custom trained model Id to be used.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomForms(Flux<ByteBuffer> data, String modelId, long length, FormContentType formContentType) {
return beginRecognizeCustomForms(data, modelId, length, formContentType, false, null);
}
/**
* Recognizes and extracts form data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param modelId The custom trained model Id to be used.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param includeTextDetails Include text lines and element references in the result.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomForms(Flux<ByteBuffer> data, String modelId, long length, FormContentType formContentType,
boolean includeTextDetails, Duration pollInterval) {
return client.beginRecognizeCustomForms(data, modelId, length, formContentType,
includeTextDetails, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts layout data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
*
* @return A {@link SyncPoller} that polls the extract layout form operation until it has completed, has failed,
* or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<FormPage>> beginRecognizeContentFromUrl(String fileSourceUrl) {
return beginRecognizeContentFromUrl(fileSourceUrl, null);
}
/**
* Recognizes and extracts layout data using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed, has
* failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContentFromUrl(String sourceUrl, Duration pollInterval) {
return client.beginRecognizeContentFromUrl(sourceUrl, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts layout data using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed, has failed, or has
* been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContent(InputStream data, long length, FormContentType formContentType) {
return beginRecognizeContent(data, length, formContentType, null);
}
/**
* Recognizes and extracts layout data from the provided document data using optical character recognition (OCR)
* and a prebuilt trained receipt model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed,
* has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContent(InputStream data, long length, FormContentType formContentType,
Duration pollInterval) {
Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
return client.beginRecognizeContent(buffer, formContentType, length, pollInterval)
.getSyncPoller();
}
/**
* Recognizes and extracts receipt data from documentsusing optical character recognition (OCR) and a
* prebuilt receipt trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
*
* @return A {@link SyncPoller} to poll the progress of the extract receipt operation until it has completed,
* has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceiptsFromUrl(String sourceUrl) {
return beginRecognizeReceiptsFromUrl(sourceUrl, false, null);
}
/**
* Recognizes and extracts receipt data from documentsusing optical character recognition (OCR) and a
* prebuilt receipt trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} to poll the progress of the extract receipt operation until it has completed,
* has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceiptsFromUrl(String sourceUrl, boolean includeTextDetails, Duration pollInterval) {
return client.beginRecognizeReceiptsFromUrl(sourceUrl, includeTextDetails, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts data from the provided document data using optical character recognition (OCR)
* and a prebuilt trained receipt model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract receipt operation until it has completed,
* has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceipts(InputStream data, long length, FormContentType formContentType) {
return beginRecognizeReceipts(data, length, formContentType, false, null);
}
/**
* Recognizes and extracts data from the providedd document data using optical character recognition (OCR)
* and a prebuilt trained receipt model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract receipt operation until it
* has completed, has failed, or has been cancelled.
*/
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceipts(InputStream data, long length, FormContentType formContentType,
boolean includeTextDetails, Duration pollInterval) {
Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
return client.beginRecognizeReceipts(buffer, length, formContentType, includeTextDetails, pollInterval)
.getSyncPoller();
}
} | class FormRecognizerClient {
private final FormRecognizerAsyncClient client;
/**
* Create a {@link FormRecognizerClient client} that sends requests to the Form Recognizer service's endpoint.
* Each service call goes through the {@link FormRecognizerClientBuilder
*
* @param client The {@link FormRecognizerClient} that the client routes its request through.
*/
FormRecognizerClient(FormRecognizerAsyncClient client) {
this.client = client;
}
/**
* Creates a new {@link FormTrainingClient} object.The new {@code FormRecognizerClient} uses the same request policy
* pipeline as the {@code FormRecognizerClient}.
*
* @return A new {@link FormTrainingClient} object.
*/
/**
* Recognizes and extracts receipt data from documents using optical character recognition (OCR)
* and a custom trained model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param modelId The UUID string format custom trained model Id to be used.
*
* @return A {@link SyncPoller} to poll the progress of the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomFormsFromUrl(String fileSourceUrl, String modelId) {
return beginRecognizeCustomFormsFromUrl(fileSourceUrl, modelId, false, null);
}
/**
* Recognizes and extracts receipt data from documents using optical character recognition (OCR)
* and a custom trained model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param modelId The UUID string format custom trained model Id to be used.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} to poll the progress of the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomFormsFromUrl(String fileSourceUrl, String modelId, boolean includeTextDetails,
Duration pollInterval) {
return client.beginRecognizeCustomFormsFromUrl(fileSourceUrl, modelId, includeTextDetails, pollInterval)
.getSyncPoller();
}
/**
* Recognizes and extracts form data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param modelId The UUID string format custom trained model Id to be used.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomForms(InputStream data, String modelId, long length, FormContentType formContentType) {
return beginRecognizeCustomForms(data, modelId, length, formContentType, false, null);
}
/**
* Recognizes and extracts form data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param modelId The UUID string format custom trained model Id to be used.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract custom form operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedForm>>
beginRecognizeCustomForms(InputStream data, String modelId, long length, FormContentType formContentType,
boolean includeTextDetails, Duration pollInterval) {
Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
return client.beginRecognizeCustomForms(buffer, modelId, length, formContentType,
includeTextDetails, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts layout data from documents using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param fileSourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
*
* @return A {@link SyncPoller} that polls the extract layout form operation until it has completed, has failed,
* or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<FormPage>> beginRecognizeContentFromUrl(String fileSourceUrl) {
return beginRecognizeContentFromUrl(fileSourceUrl, null);
}
/**
* Recognizes and extracts layout data using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed, has
* failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContentFromUrl(String sourceUrl, Duration pollInterval) {
return client.beginRecognizeContentFromUrl(sourceUrl, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts layout data using optical character recognition (OCR) and a custom trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support.</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed, has failed, or has
* been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContent(InputStream data, long length, FormContentType formContentType) {
return beginRecognizeContent(data, length, formContentType, null);
}
/**
* Recognizes and extracts layout data from the provided document data using optical character recognition (OCR)
* and a prebuilt trained receipt model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract layout operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<FormPage>>
beginRecognizeContent(InputStream data, long length, FormContentType formContentType,
Duration pollInterval) {
Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
return client.beginRecognizeContent(buffer, formContentType, length, pollInterval)
.getSyncPoller();
}
/**
* Recognizes and extracts receipt data from documentsusing optical character recognition (OCR) and a
* prebuilt receipt trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
*
* @return A {@link SyncPoller} to poll the progress of the extract receipt operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceiptsFromUrl(String sourceUrl) {
return beginRecognizeReceiptsFromUrl(sourceUrl, false, null);
}
/**
* Recognizes and extracts receipt data from documentsusing optical character recognition (OCR) and a
* prebuilt receipt trained
* model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param sourceUrl The source URL to the input document. Size of the file must be less than 20 MB.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} to poll the progress of the extract receipt operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceiptsFromUrl(String sourceUrl, boolean includeTextDetails, Duration pollInterval) {
return client.beginRecognizeReceiptsFromUrl(sourceUrl, includeTextDetails, pollInterval).getSyncPoller();
}
/**
* Recognizes and extracts data from the provided document data using optical character recognition (OCR)
* and a prebuilt trained receipt model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
*
* @return A {@link SyncPoller} that polls the extract receipt operation until it has completed,
* has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceipts(InputStream data, long length, FormContentType formContentType) {
return beginRecognizeReceipts(data, length, formContentType, false, null);
}
/**
* Recognizes and extracts data from the providedd document data using optical character recognition (OCR)
* and a prebuilt trained receipt model.
* <p>The service does not support cancellation of the long running operation and returns with an
* error message indicating absence of cancellation support</p>
*
* @param data The data of the document to be extract receipt information from.
* @param length The exact length of the data. Size of the file must be less than 20 MB.
* @param formContentType Supported Media types including .pdf, .jpg, .png or .tiff type file stream.
* @param includeTextDetails Include text lines and element references in the result.
* @param pollInterval Duration between each poll for the operation status. If none is specified, a default of
* 5 seconds is used.
*
* @return A {@link SyncPoller} that polls the extract receipt operation until it
* has completed, has failed, or has been cancelled.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<OperationResult, IterableStream<RecognizedReceipt>>
beginRecognizeReceipts(InputStream data, long length, FormContentType formContentType,
boolean includeTextDetails, Duration pollInterval) {
Flux<ByteBuffer> buffer = Utility.convertStreamToByteBuffer(data);
return client.beginRecognizeReceipts(buffer, length, formContentType, includeTextDetails, pollInterval)
.getSyncPoller();
}
} |
Looks like label and name are mixed (?) name should contain the "field-" string, and label the value you have in `eachField` | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
DEFAULT_CONFIDENCE_VALUE,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | String fieldLabel = "field-" + i; | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
public static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
Will this apply even if this is accuracy and not confidence? | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
DEFAULT_CONFIDENCE_VALUE,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | DEFAULT_CONFIDENCE_VALUE, | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
public static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
It shouldn't apply to accuracy, updated! | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
DEFAULT_CONFIDENCE_VALUE,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | DEFAULT_CONFIDENCE_VALUE, | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
public static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
Why do we instantiate `clusterFieldSize` in the for loop and use it nowhere else? | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) { | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
I'm seeing a lot of cases where we are looping over an iterable where we also need to know which index we are at, should we add a helper function which takes a `BiConsumer<Integer, T>` which will maintain the index count and pass the appropriate element? ```java static void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) { int[] index = new int[] { 0 }; iterable.forEach(element -> biConsumer.accept(index[0]++, element)); } ``` | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | String eachField = clusterFields.get(i); | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
not sure how Java does it, but asking the same Krista pointed out in in .net, TrainResult can be null. So, will `modelResponse.getTrainResult().getTrainingDocuments()` and all other references work? | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | modelResponse.getTrainResult().getTrainingDocuments().stream() | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
Updated to check for trainResult as it can be null before referencing the training documents. | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | modelResponse.getTrainResult().getTrainingDocuments().stream() | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
Updated! | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | String eachField = clusterFields.get(i); | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
Oh yes, thank you for finding that! | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
new IterableStream<FormRecognizerError>(transformTrainingErrors(trainingDocumentItem.getErrors()))))
.collect(Collectors.toList());
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
for (int i = 0, clusterFieldsSize = clusterFields.size(); i < clusterFieldsSize; i++) {
String eachField = clusterFields.get(i);
String fieldLabel = "field-" + i;
fieldMap.put(fieldLabel, new CustomFormModelField(fieldLabel, eachField, null));
}
subModelList.add(new CustomFormSubModel(
DEFAULT_CONFIDENCE_VALUE,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
ModelTrainingStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
new IterableStream<FormRecognizerError>(
transformTrainingErrors(modelResponse.getTrainResult().getErrors())),
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | String fieldLabel = "field-" + i; | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
public static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new ArrayList<>();
} else {
return trainingErrorList.stream().map(errorInformation ->
new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage())).collect(Collectors.toList());
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
I think that is what we had decided as a team. Do you have any suggestions we discuss it with the crew. | public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
} | FormField<String> merchantName = null; | public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
} | class ReceiptExtensions {
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
/**
* Helper method to convert the service level
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to SDK level {@link USReceiptItem receipt items}.
*
* @param fieldValueItems The strongly typed field values.
*
* @return A list of {@link USReceiptItem}.
*/
@SuppressWarnings("unchecked")
private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
    List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
    List<USReceiptItem> receiptItemList = new ArrayList<>();
    for (FormField<?> eachFieldValue : fieldValueArray) {
        Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
        // Reset per item: declaring these outside the loop let a field parsed for an
        // earlier item leak into a later item that does not contain that field.
        FormField<String> name = null;
        FormField<Float> quantity = null;
        FormField<Float> price = null;
        FormField<Float> totalPrice = null;
        for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
            String key = entry.getKey();
            if (QUANTITY.toString().equals(key)) {
                quantity = (FormField<Float>) entry.getValue();
            } else if (NAME.toString().equals(key)) {
                name = (FormField<String>) entry.getValue();
            } else if (PRICE.toString().equals(key)) {
                price = (FormField<Float>) entry.getValue();
            } else if (TOTAL_PRICE.toString().equals(key)) {
                totalPrice = (FormField<Float>) entry.getValue();
            }
        }
        // Construct the item exactly once, after all of its fields have been gathered,
        // instead of re-allocating it on every map entry.
        receiptItemList.add(new USReceiptItem(name, quantity, price, totalPrice));
    }
    return receiptItemList;
}
} | class ReceiptExtensions {
private ReceiptExtensions() {
}
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
@SuppressWarnings("unchecked")
/**
* Helper method to convert the service level
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to SDK level {@link USReceiptItem receipt items}.
*
* @param fieldValueItems The strongly typed field values.
*
* @return A list of {@link USReceiptItem}.
*/
@SuppressWarnings("unchecked")
private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
    List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
    // Create the result list exactly once. Re-creating it inside the loop silently
    // dropped every previously collected item (only the last item survived), and the
    // null initializer made the method return null instead of an empty list for an
    // empty "Items" array.
    List<USReceiptItem> receiptItemList = new ArrayList<>();
    for (FormField<?> eachFieldValue : fieldValueArray) {
        Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
        FormField<String> name = null;
        FormField<Float> quantity = null;
        FormField<Float> price = null;
        FormField<Float> totalPrice = null;
        for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
            String key = entry.getKey();
            if (QUANTITY.toString().equals(key)) {
                quantity = (FormField<Float>) entry.getValue();
            } else if (NAME.toString().equals(key)) {
                name = (FormField<String>) entry.getValue();
            } else if (PRICE.toString().equals(key)) {
                price = (FormField<Float>) entry.getValue();
            } else if (TOTAL_PRICE.toString().equals(key)) {
                totalPrice = (FormField<Float>) entry.getValue();
            }
        }
        receiptItemList.add(new USReceiptItem(name, quantity, price, totalPrice));
    }
    return receiptItemList;
}
} |
This should use `IterableStream.of`: `trainingDocumentInfoList` may be `null`, in which case the constructor throws a `NullPointerException`, whereas the factory method returns an empty instance. | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
IterableStream<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList)); | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static IterableStream<FormRecognizerError> transformTrainingErrors(
    List<ErrorInformation> trainingErrorList) {
    // Guard clause: no errors reported by the service means an empty stream, never null.
    if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
        return new IterableStream<FormRecognizerError>(Collections.emptyList());
    }
    List<FormRecognizerError> convertedErrors = new ArrayList<>();
    for (ErrorInformation errorInformation : trainingErrorList) {
        convertedErrors.add(new FormRecognizerError(errorInformation.getCode(),
            errorInformation.getMessage()));
    }
    return new IterableStream<>(convertedErrors);
}
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param list A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> list) {
    // Plain element-by-element mapping. The anonymous CollectionTransformer subclass
    // added an extra type and allocation without any benefit for a single call site.
    List<CustomFormModelInfo> customFormModelInfoList = new ArrayList<>(list.size());
    for (ModelInfo modelInfo : list) {
        customFormModelInfoList.add(new CustomFormModelInfo(modelInfo.getModelId().toString(),
            CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
            modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime()));
    }
    return customFormModelInfoList;
}
/**
* A generic transformation class for collection that transform from type {@code E} to type {@code F}.
*
* @param <E> Transform type E to another type.
* @param <F> Transform to type F from another type.
*/
abstract static class CollectionTransformer<E, F> {
    // Converts a single source element; the concrete mapping is supplied by subclasses.
    abstract F transform(E e);
    // Applies the element-level transform to every item in order and collects the
    // results into a new list; assumes the input list is non-null.
    List<F> transform(List<E> list) {
        List<F> newList = new ArrayList<>();
        for (E e : list) {
            newList.add(transform(e));
        }
        return newList;
    }
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
    // Convert each service-side model summary into its SDK-level counterpart, keeping order.
    List<CustomFormModelInfo> converted = new ArrayList<>();
    for (ModelInfo modelInfo : modelInfoList) {
        converted.add(new CustomFormModelInfo(modelInfo.getModelId().toString(),
            CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
            modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime()));
    }
    return converted;
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
    List<ErrorInformation> trainingErrorList) {
    // No errors reported: hand back an immutable empty list rather than null.
    if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
        return Collections.emptyList();
    }
    List<FormRecognizerError> errors = new ArrayList<>();
    for (ErrorInformation errorInformation : trainingErrorList) {
        errors.add(new FormRecognizerError(errorInformation.getCode(),
            errorInformation.getMessage()));
    }
    return errors;
}
} |
Could `subModelList` ever end up empty if both the `if` and `else if` don't pass? | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
IterableStream<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | List<CustomFormSubModel> subModelList = new ArrayList<>(); | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static IterableStream<FormRecognizerError> transformTrainingErrors(
    List<ErrorInformation> trainingErrorList) {
    // Guard clause: no errors reported by the service means an empty stream, never null.
    if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
        return new IterableStream<FormRecognizerError>(Collections.emptyList());
    }
    List<FormRecognizerError> convertedErrors = new ArrayList<>();
    for (ErrorInformation errorInformation : trainingErrorList) {
        convertedErrors.add(new FormRecognizerError(errorInformation.getCode(),
            errorInformation.getMessage()));
    }
    return new IterableStream<>(convertedErrors);
}
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param list A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> list) {
    // Plain element-by-element mapping. The anonymous CollectionTransformer subclass
    // added an extra type and allocation without any benefit for a single call site.
    List<CustomFormModelInfo> customFormModelInfoList = new ArrayList<>(list.size());
    for (ModelInfo modelInfo : list) {
        customFormModelInfoList.add(new CustomFormModelInfo(modelInfo.getModelId().toString(),
            CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
            modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime()));
    }
    return customFormModelInfoList;
}
/**
* A generic transformation class for collection that transform from type {@code E} to type {@code F}.
*
* @param <E> Transform type E to another type.
* @param <F> Transform to type F from another type.
*/
abstract static class CollectionTransformer<E, F> {
    // Converts a single source element; the concrete mapping is supplied by subclasses.
    abstract F transform(E e);
    // Applies the element-level transform to every item in order and collects the
    // results into a new list; assumes the input list is non-null.
    List<F> transform(List<E> list) {
        List<F> newList = new ArrayList<>();
        for (E e : list) {
            newList.add(transform(e));
        }
        return newList;
    }
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
    // Convert each service-side model summary into its SDK-level counterpart, keeping order.
    List<CustomFormModelInfo> converted = new ArrayList<>();
    for (ModelInfo modelInfo : modelInfoList) {
        converted.add(new CustomFormModelInfo(modelInfo.getModelId().toString(),
            CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
            modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime()));
    }
    return converted;
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
    List<ErrorInformation> trainingErrorList) {
    // No errors reported: hand back an immutable empty list rather than null.
    if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
        return Collections.emptyList();
    }
    List<FormRecognizerError> errors = new ArrayList<>();
    for (ErrorInformation errorInformation : trainingErrorList) {
        errors.add(new FormRecognizerError(errorInformation.getCode(),
            errorInformation.getMessage()));
    }
    return errors;
}
} |
Do these need to be atomic references? | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (DocumentResult documentResultItem : documentResults) {
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
} else {
pageRange.set(new PageRange(1, 1));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
pageRange.get(),
new IterableStream<>(formPages.subList(pageRange.get().getStartPageNumber() - 1,
pageRange.get().getEndPageNumber()))));
}
}
if (!CoreUtils.isNullOrEmpty(pageResults)) {
for (PageResult pageResultItem : pageResults) {
int pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults, pageResultItem,
pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
}
return extractedFormList;
} | AtomicReference<String> formType = new AtomicReference<>("form-"); | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    // Wrap every recognized form in a US-locale receipt before exposing it as a stream.
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // pageResults (when present) is parallel to readResults, so walk both by index.
    for (int index = 0; index < readResults.size(); index++) {
        ReadResult readResultItem = readResults.get(index);
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!CoreUtils.isNullOrEmpty(pageResults)) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        List<FormLine> perPageFormLineList = new ArrayList<>();
        // Per-line text detail is only materialized when the caller asked for it.
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }
    return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // Track the position with a plain counter and an enhanced for loop instead of
    // the single-element int[] mutation trick inside a forEach lambda.
    int position = 0;
    for (T element : iterable) {
        biConsumer.accept(position++, element);
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    List<FormTable> extractedTablesList = new ArrayList<>();
    pageResultItem.getTables().forEach(dataTable -> {
        // Convert every service cell first, then assemble the SDK-level table around them.
        List<FormTableCell> cellList = new ArrayList<>();
        dataTable.getCells().forEach(dataTableCell -> cellList.add(new FormTableCell(
            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
            dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
            dataTableCell.getConfidence(),
            dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
            dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
            pageNumber,
            setReferenceElements(dataTableCell.getElements(), readResults, pageNumber))));
        extractedTablesList.add(
            new FormTable(dataTable.getRows(), dataTable.getColumns(), new IterableStream<>(cellList)));
    });
    return extractedTablesList;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // Materialize each service text line as an SDK FormLine, tagged with its page.
    List<FormLine> formLines = new ArrayList<>();
    readResultItem.getLines().forEach(textLine -> formLines.add(
        new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage())))));
    return formLines;
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> fieldEntry : documentResultItem.getFields().entrySet()) {
        String key = fieldEntry.getKey();
        FieldValue fieldValue = fieldEntry.getValue();
        Integer pageNumber = fieldValue.getPage();
        // Reference elements are only populated when the caller asked for text details.
        IterableStream<FormContent> formContentList = null;
        if (includeTextDetails) {
            formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    }
    return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    // Dispatch on the wire type and return the matching strongly typed FormField
    // directly from each case, instead of assigning to a shared local first.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            return new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
        case STRING:
            return new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
        case TIME:
            return new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
        case DATE:
            return new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
        case INTEGER:
            return new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
        case NUMBER:
            return new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
        case ARRAY:
            // Arrays carry no label/value text of their own; elements are converted recursively.
            return new FormField<>(null, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
        case OBJECT:
            return new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // The service may omit confidence; treat an absent value as fully confident.
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue}
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue}.
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    // TreeMap keeps the resulting field names sorted, matching the other field maps.
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        String key = entry.getKey();
        FieldValue fieldValue = entry.getValue();
        FieldText valueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, valueText, fieldValue.getPage(), readResults));
    }
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    // Array members have no label, key, or value text of their own.
    List<FormField<?>> formFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // The service unit enum is round-tripped through its string form into the SDK enum.
    DimensionUnit unit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        unit,
        readResultItem.getWidth(),
        new IterableStream<FormLine>(perPageLineList),
        new IterableStream<FormTable>(perPageTableList));
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    for (int index = 0; index < keyValuePairs.size(); index++) {
        KeyValuePair keyValuePair = keyValuePairs.get(index);
        // Reference elements default to empty; only resolved when text details were requested.
        IterableStream<FormContent> keyElements = new IterableStream<>(Collections.emptyList());
        IterableStream<FormContent> valueElements = new IterableStream<>(Collections.emptyList());
        if (includeTextDetails) {
            keyElements = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            valueElements = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
        }
        // Unlabeled key/value pairs get synthetic, index-based field names.
        String fieldName = "field-" + index;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyElements);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueElements);
        formFieldMap.put(fieldName, new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber));
    }
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new IterableStream<>(Collections.emptyList());
    }
    List<FormContent> referencedContent = new ArrayList<>();
    for (String elementString : elements) {
        // Element references carry numeric indices; strip non-digits and split on the gaps.
        // Two indices address a line, three address a single word within that line.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
        if (indices.length == 3) {
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord = textLine.getWords().get(wordIndex);
            referencedContent.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber, setDefaultConfidenceValue(textWord.getConfidence())));
        } else {
            referencedContent.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber)));
        }
    }
    return new IterableStream<>(referencedContent);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    // Words inherit the page number of their containing line.
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
            pageNumber, setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // A valid box is a flat, even-length list of alternating x/y coordinates.
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> points = new ArrayList<>(serviceBoundingBox.size() / 2);
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        points.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(new IterableStream<Point>(points));
}
} | class Transforms {
// Logger shared by all transform helpers in this class.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Matches runs of non-digit characters; used to extract numeric indices from
// service element-reference strings. Compiled once and reused.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence substituted when the service omits one (treated as fully confident).
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Static utility class; no instances.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        // Each recognized form is wrapped as a receipt with the hard-coded "en-US" locale.
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    // Loop-invariant: page results are either present for all pages or absent entirely.
    boolean hasPageResults = !CoreUtils.isNullOrEmpty(pageResults);
    List<FormPage> formPages = new ArrayList<>();
    for (int index = 0; index < readResults.size(); index++) {
        ReadResult readResultItem = readResults.get(index);
        // Tables exist only when the service returned page-level results.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (hasPageResults) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        // Lines are only populated when text details were requested and the page has lines.
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }
    return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // Plain counter loop; avoids the mutable-array capture a lambda would need.
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index, element);
        index++;
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    return pageResultItem.getTables().stream()
        .map(dataTable -> {
            // Map each service cell to an SDK table cell; null header/footer flags become false.
            List<FormTableCell> cells = dataTable.getCells().stream()
                .map(cell -> new FormTableCell(
                    cell.getRowIndex(), cell.getColumnIndex(),
                    cell.getRowSpan(), cell.getColumnSpan(),
                    cell.getText(), toBoundingBox(cell.getBoundingBox()),
                    cell.getConfidence(),
                    cell.isHeader() == null ? false : cell.isHeader(),
                    cell.isFooter() == null ? false : cell.isFooter(),
                    pageNumber, setReferenceElements(cell.getElements(), readResults, pageNumber)))
                .collect(Collectors.toList());
            return new FormTable(dataTable.getRows(), dataTable.getColumns(), new IterableStream<>(cells));
        })
        .collect(Collectors.toList());
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // Every line on the page carries the page's 1-based number.
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            toWords(textLine.getWords(), readResultItem.getPage())));
    }
    return formLines;
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> fieldMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : documentResultItem.getFields().entrySet()) {
        String key = entry.getKey();
        FieldValue fieldValue = entry.getValue();
        Integer pageNumber = fieldValue.getPage();
        // Reference elements stay null unless the caller asked for text details.
        IterableStream<FormContent> referencedElements = includeTextDetails
            ? setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
            : null;
        // The label has no bounding box or elements — only the field's key text.
        FieldText labelText = new FieldText(key, null, pageNumber, null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, referencedElements);
        fieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    }
    return fieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    // Default the confidence once; setDefaultConfidenceValue is a pure null-check.
    final float defaultedConfidence = setDefaultConfidenceValue(fieldValue.getConfidence());
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            return new FormField<>(defaultedConfidence, labelText, key,
                fieldValue.getValuePhoneNumber(), valueText, pageNumber);
        case STRING:
            return new FormField<>(defaultedConfidence, labelText, key,
                fieldValue.getValueString(), valueText, pageNumber);
        case TIME:
            return new FormField<>(defaultedConfidence, labelText, key,
                fieldValue.getValueTime(), valueText, pageNumber);
        case DATE:
            return new FormField<>(defaultedConfidence, labelText, key,
                fieldValue.getValueDate(), valueText, pageNumber);
        case INTEGER:
            return new FormField<>(defaultedConfidence, labelText, key,
                fieldValue.getValueInteger(), valueText, pageNumber);
        case NUMBER:
            return new FormField<Number>(defaultedConfidence, labelText, key,
                fieldValue.getValueNumber(), valueText, pageNumber);
        case ARRAY:
            // Arrays nest their members as a list of FormField values; no label/value text.
            return new FormField<>(defaultedConfidence, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
        case OBJECT:
            return new FormField<>(defaultedConfidence, labelText, key,
                toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // Absent confidence from the service is treated as fully confident.
    return confidence != null ? confidence : DEFAULT_CONFIDENCE_VALUE;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    // TreeMap keeps the resulting field names sorted, matching the other field maps.
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        String key = entry.getKey();
        FieldValue fieldValue = entry.getValue();
        FieldText valueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, valueText, fieldValue.getPage(), readResults));
    }
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    // Array members have no label, key, or value text of their own.
    List<FormField<?>> formFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // The service unit enum is round-tripped through its string form into the SDK enum.
    DimensionUnit unit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        unit,
        readResultItem.getWidth(),
        new IterableStream<FormLine>(perPageLineList),
        new IterableStream<FormTable>(perPageTableList));
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    for (int index = 0; index < keyValuePairs.size(); index++) {
        KeyValuePair keyValuePair = keyValuePairs.get(index);
        // Reference elements stay null unless text details were requested.
        IterableStream<FormContent> keyElements = null;
        IterableStream<FormContent> valueElements = null;
        if (includeTextDetails) {
            keyElements = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            valueElements = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyElements);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueElements);
        // Unlabeled key/value pairs get synthetic, index-based field names.
        String fieldName = "field-" + index;
        formFieldMap.put(fieldName, new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber));
    }
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        // FIX: return a genuinely empty stream. The previous IterableStream.of(null)
        // did not yield an empty iterable and broke callers that iterate the result;
        // the sibling copy of this method in this file already returns an empty stream.
        return new IterableStream<>(new ArrayList<FormContent>());
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Element references carry numeric indices; strip non-digits and split on the gaps.
        // Two indices address a whole line, three address a single word within that line.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    // Words inherit the page number of their containing line.
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
            pageNumber, setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // A valid box is a flat, even-length list of alternating x/y coordinates.
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> points = new ArrayList<>(serviceBoundingBox.size() / 2);
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        points.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(points);
}
} |
Since `PageRange` is immutable could we create a static instance that is used here? | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (DocumentResult documentResultItem : documentResults) {
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
} else {
pageRange.set(new PageRange(1, 1));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
pageRange.get(),
new IterableStream<>(formPages.subList(pageRange.get().getStartPageNumber() - 1,
pageRange.get().getEndPageNumber()))));
}
}
if (!CoreUtils.isNullOrEmpty(pageResults)) {
for (PageResult pageResultItem : pageResults) {
int pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults, pageResultItem,
pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
}
return extractedFormList;
} | pageRange.set(new PageRange(1, 1)); | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | class Transforms {
// Logger shared by all transform helpers in this class.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Matches runs of non-digit characters; used to extract numeric indices from
// service element-reference strings. Compiled once and reused.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence substituted when the service omits one (treated as fully confident).
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Static utility class; no instances.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        // Each recognized form is wrapped as a receipt with the hard-coded "en-US" locale.
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The list of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // FIX: pageResults does not change per page, so evaluate the empty-check once
    // instead of on every iteration (matches the other copy of this method in the file).
    final boolean hasPageResults = !CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        // Tables exist only when the service returned page-level results.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (hasPageResults) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        // Lines are only populated when text details were requested and the page has lines.
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }));
    return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // Plain counter loop; avoids the mutable-array capture a lambda would need.
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index, element);
        index++;
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    return pageResultItem.getTables().stream()
        .map(dataTable -> {
            // Map each service cell to an SDK table cell; null header/footer flags become false.
            List<FormTableCell> cells = dataTable.getCells().stream()
                .map(dataTableCell -> new FormTableCell(
                    dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                    dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                    dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                    dataTableCell.getConfidence(),
                    dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                    dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                    pageNumber,
                    setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
                .collect(Collectors.toList());
            return new FormTable(dataTable.getRows(), dataTable.getColumns(), new IterableStream<>(cells));
        })
        .collect(Collectors.toList());
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
// Maps each service text line of the page into an SDK FormLine, carrying the page number.
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
// NOTE(review): toWords(...) is wrapped in an extra IterableStream here, while the sibling
// copies of this method pass the toWords(...) result directly — confirm the return type of
// the toWords overload in scope for this copy before changing either call site.
return readResultItem.getLines().stream()
.map(textLine -> new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(), new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage()))))
.collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> fieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((fieldName, fieldValue) -> {
        Integer page = fieldValue.getPage();
        // Reference elements are resolved only when the caller asked for text details;
        // otherwise the value text carries a null element stream.
        IterableStream<FormContent> referencedElements = includeTextDetails
            ? setReferenceElements(fieldValue.getElements(), readResults, page)
            : null;
        FieldText label = new FieldText(fieldName, null, page, null);
        FieldText value = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            page, referencedElements);
        fieldMap.put(fieldName, setFormField(label, fieldName, fieldValue, value, page, readResults));
    });
    return fieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    // Dispatch on the service field type to the matching strongly typed accessor.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Array fields carry no label/value text of their own, but the confidence must still
            // default to 1.0 when omitted, consistent with every other field type (previously a
            // raw null was passed here, which risks an NPE on primitive-float unboxing).
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // The service may omit confidence entirely; treat "absent" as fully confident.
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> result = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        String key = entry.getKey();
        FieldValue fieldValue = entry.getValue();
        // Nested field values resolve their reference elements against the parent's page number.
        FieldText nestedValueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        result.put(key, setFormField(null, key, fieldValue, nestedValueText, fieldValue.getPage(), readResults));
    }
    return result;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    // Array members have no label/value text; each element becomes its own FormField.
    List<FormField<?>> formFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // Bundle this page's geometry plus its line and table content into a FormPage.
    IterableStream<FormLine> lines = new IterableStream<>(perPageLineList);
    IterableStream<FormTable> tables = new IterableStream<>(perPageTableList);
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        DimensionUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        lines,
        tables);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    forEachWithIndex(pageResultItem.getKeyValuePairs(), ((index, keyValuePair) -> {
        // Reference elements stay as empty streams unless text details were requested.
        IterableStream<FormContent> keyElements = includeTextDetails
            ? setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber)
            : new IterableStream<>(Collections.emptyList());
        IterableStream<FormContent> valueElements = includeTextDetails
            ? setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber)
            : new IterableStream<>(Collections.emptyList());
        // Unlabeled pairs get synthetic, index-based field names.
        String fieldName = "field-" + index;
        FieldText label = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyElements);
        FieldText value = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueElements);
        formFieldMap.put(fieldName, new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            label, fieldName, keyValuePair.getValue().getText(), value, pageNumber));
    }));
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
 * @return The list of referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new IterableStream<>(Collections.emptyList());
    }
    List<FormContent> referencedElements = new ArrayList<>();
    for (String elementString : elements) {
        // The element reference presumably encodes readResult/line[/word] indices in its path;
        // strip everything non-numeric and keep the index sequence. TODO confirm the exact format.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
        if (indices.length == 3) {
            // Three indices reference a single word within the line.
            TextWord textWord = textLine.getWords().get(Integer.parseInt(indices[2]));
            referencedElements.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber, setDefaultConfidenceValue(textWord.getConfidence())));
        } else {
            // Two indices reference the whole line.
            referencedElements.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber)));
        }
    }
    return new IterableStream<>(referencedElements);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    // Convert each service word into an SDK FormWord, defaulting missing confidence values.
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // A usable bounding box is a non-empty, even-length flat list of x/y coordinate pairs.
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    // Walk the flat list two coordinates at a time with an explicit stride; clearer and less
    // error-prone than mutating the loop index inside the body via get(++i).
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(new IterableStream<Point>(pointList));
}
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    // A receipt is a recognized form wrapped with a locale; the locale is hard-coded to "en-US".
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    // Hoisted once: page results may be absent for layout-only analysis.
    boolean hasPageResults = !CoreUtils.isNullOrEmpty(pageResults);
    List<FormPage> formPages = new ArrayList<>();
    for (int index = 0; index < readResults.size(); index++) {
        ReadResult readResultItem = readResults.get(index);
        // Tables exist only when the service returned page-level results; the page result at
        // the same index describes the same page as this read result.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (hasPageResults) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        // Line data is populated only when text details were requested and the page has lines.
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }
    return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // A one-element array serves as a mutable counter because locals captured by the
    // lambda below must be effectively final.
    int[] index = new int[]{0};
    iterable.forEach(element -> biConsumer.accept(index[0]++, element));
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    List<FormTable> tables = new ArrayList<>();
    pageResultItem.getTables().forEach(dataTable -> {
        // Build this table's cells first, then wrap them into the SDK FormTable.
        List<FormTableCell> cells = new ArrayList<>();
        dataTable.getCells().forEach(dataTableCell -> cells.add(new FormTableCell(
            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
            dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
            dataTableCell.getConfidence(),
            // Header/footer flags may be absent; absent means false.
            dataTableCell.isHeader() != null && dataTableCell.isHeader(),
            dataTableCell.isFooter() != null && dataTableCell.isFooter(),
            pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber))));
        tables.add(new FormTable(dataTable.getRows(), dataTable.getColumns(), new IterableStream<>(cells)));
    });
    return tables;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // Each service text line on this page becomes an SDK FormLine with its words attached.
    Integer page = readResultItem.getPage();
    List<FormLine> formLines = new ArrayList<>(readResultItem.getLines().size());
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
            page, toWords(textLine.getWords(), page)));
    }
    return formLines;
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        Integer pageNumber = fieldValue.getPage();
        // Only resolve reference elements when text details were requested; otherwise the
        // value text carries a null element stream.
        IterableStream<FormContent> formContentList = includeTextDetails
            ? setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
            : null;
        FieldText labelText = new FieldText(key, null, pageNumber, null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    // Every field type applies the same missing-confidence default; compute it once.
    float confidence = setDefaultConfidenceValue(fieldValue.getConfidence());
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            return new FormField<>(confidence, labelText, key, fieldValue.getValuePhoneNumber(), valueText,
                pageNumber);
        case STRING:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueString(), valueText, pageNumber);
        case TIME:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueTime(), valueText, pageNumber);
        case DATE:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueDate(), valueText, pageNumber);
        case INTEGER:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueInteger(), valueText, pageNumber);
        case NUMBER:
            return new FormField<Number>(confidence, labelText, key, fieldValue.getValueNumber(), valueText,
                pageNumber);
        case ARRAY:
            // Array fields carry no label/value text of their own.
            return new FormField<>(confidence, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
        case OBJECT:
            return new FormField<>(confidence, labelText, key,
                toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText, pageNumber);
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // The service omits confidence for some fields; treat "absent" as fully confident (1.0f).
    return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    valueObject.forEach((key, fieldValue) -> {
        // Nested values resolve their reference elements against the parent's page number,
        // while the FieldText itself reports the nested value's own page.
        FieldText nestedValueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, nestedValueText, fieldValue.getPage(), readResults));
    });
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    // Each array member becomes its own FormField without label or value text.
    List<FormField<?>> formFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // Assemble the page geometry together with its extracted lines and tables.
    IterableStream<FormLine> lineStream = new IterableStream<>(perPageLineList);
    IterableStream<FormTable> tableStream = new IterableStream<>(perPageTableList);
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        DimensionUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        lineStream,
        tableStream);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    forEachWithIndex(pageResultItem.getKeyValuePairs(), ((index, keyValuePair) -> {
        // Reference elements remain null unless the caller requested text details.
        IterableStream<FormContent> formKeyContentList = null;
        IterableStream<FormContent> formValueContentList = null;
        if (includeTextDetails) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        // Unlabeled key/value pairs are assigned synthetic, index-based field names.
        String fieldName = "field-" + index;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        formFieldMap.put(fieldName, new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber));
    }));
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
 * @return The list of referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        // Return an explicit empty stream rather than IterableStream.of(null): wrapping a
        // null iterable risks a NullPointerException as soon as the caller iterates, and the
        // sibling implementation of this helper already returns an empty stream here.
        return new IterableStream<>(Collections.emptyList());
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // The element reference presumably encodes readResult/line[/word] indices in its path;
        // keep only the numeric index sequence. TODO confirm the exact reference format.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices identify a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            // Two indices identify a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    // Convert service words to SDK FormWords, defaulting any missing confidence values.
    List<FormWord> converted = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        converted.add(new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(converted);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // A usable bounding box is a non-empty, even-length flat list of x/y coordinate pairs.
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    // Walk the flat list two coordinates at a time with an explicit stride; clearer and less
    // error-prone than mutating the loop index inside the body via get(++i).
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(pointList);
}
} |
What are some example page ranges the service would return? Taking an example from most printing UIs where you can set an option of print page 3, would the service return a page range of `{ 3 }` or would it be `{ 3, 3 }`? If it is the former would it make defaults to range `1-1` incorrect? | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (DocumentResult documentResultItem : documentResults) {
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
} else {
pageRange.set(new PageRange(1, 1));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
pageRange.get(),
new IterableStream<>(formPages.subList(pageRange.get().getStartPageNumber() - 1,
pageRange.get().getEndPageNumber()))));
}
}
if (!CoreUtils.isNullOrEmpty(pageResults)) {
for (PageResult pageResultItem : pageResults) {
int pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults, pageResultItem,
pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
}
return extractedFormList;
} | if (documentPageRange.size() == 2) { | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    // Each recognized form is wrapped into a receipt; the locale is hard-coded to "en-US".
    return new IterableStream<>(
        toRecognizedForm(analyzeResult, includeTextDetails).stream()
            .map(recognizedForm ->
                new RecognizedReceipt("en-US", recognizedForm))
            .collect(Collectors.toList()));
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
 *
 * @param analyzeResult The service returned result for analyze layouts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The list of {@code FormPage}.
 */
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // Hoisted out of the per-page lambda: whether pageResults is null/empty does not change while
    // iterating, so evaluate it once instead of once per read result.
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        // Tables are only available when the service returned page-level results.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        // Form lines are only populated when the caller asked for text details.
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }));
    return formPages;
}
/**
 * Iterates the given iterable, supplying each element together with its zero-based position.
 *
 * @param iterable the values to iterate over.
 * @param biConsumer invoked once per element with the element index and the element itself.
 * @param <T> the type of items being iterated.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index++, element);
    }
}
/**
 * Helper method to get per-page table information.
 *
 * @param pageResultItem The extracted page level information returned by the service.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The list of per page {@code FormTable}.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    return pageResultItem.getTables().stream()
        .map(dataTable -> new FormTable(dataTable.getRows(), dataTable.getColumns(),
            new IterableStream<>(dataTable.getCells().stream()
                .map(dataTableCell -> new FormTableCell(
                    dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                    dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                    dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                    dataTableCell.getConfidence(),
                    // header/footer flags may be absent from the wire payload; treat absent as false
                    dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                    dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                    pageNumber,
                    setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
                .collect(Collectors.toList()))))
        .collect(Collectors.toList());
}
/**
 * Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    return readResultItem.getLines().stream()
        .map(textLine -> new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            // toWords already returns an IterableStream<FormWord>; the previous extra
            // new IterableStream<>(...) wrapper was redundant and has been removed.
            toWords(textLine.getWords(), readResultItem.getPage())))
        .collect(Collectors.toList());
}
/**
 * Builds the field map returned on analyze with an unlabeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The {@code RecognizedForm} field name to {@link FormField} map.
 */
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((fieldName, fieldValue) -> {
        Integer pageNumber = fieldValue.getPage();
        // The field name itself acts as the label; it carries no bounding box of its own.
        FieldText labelText = new FieldText(fieldName, null, pageNumber, null);
        IterableStream<FormContent> referenceElements = null;
        if (includeTextDetails) {
            referenceElements = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, referenceElements);
        extractedFieldMap.put(fieldName,
            setFormField(labelText, fieldName, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
 * Helper method that converts the incoming service field value to one of the strongly typed SDK level
 * {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
 *
 * @param labelText The label text of the field.
 * @param key The name of the field.
 * @param fieldValue The named field values returned by the service.
 * @param valueText The value text of the field.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 *
 * @throws RuntimeException when the service returns a field value type this SDK version does not know.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Fix: previously passed a raw null confidence, which is inconsistent with every other
            // case and NPE-prone if FormField unboxes it; apply the same default-confidence rule.
            // Arrays have no label/value text of their own; elements are converted recursively.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
 * Helper method to set default confidence value if confidence returned by service is null.
 *
 * @param confidence the confidence returned by service, possibly {@code null}.
 *
 * @return the field confidence value; {@code DEFAULT_CONFIDENCE_VALUE} (1.0) when the service omitted it.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
 * Helper method to convert the service returned
 * {@link com.azure.ai.formrecognizer.implementation.models.FieldValue} of type object
 * to a SDK level map of {@link FormField}.
 *
 * @param valueObject The map of named field values returned by the service in the
 * {@link FieldValue}.
 * @param pageNumber The 1-based page number the parent field lives on.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The Map of {@link FormField}.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    // Each nested value is converted recursively via setFormField; the label is dropped (null)
    // because the map key already names the nested field.
    valueObject.forEach((key, fieldValue) -> fieldValueObjectMap.put(key,
        setFormField(null, key, fieldValue, new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber)),
        fieldValue.getPage(), readResults)));
    return fieldValueObjectMap;
}
/**
 * Helper method to convert the service returned
 * {@link com.azure.ai.formrecognizer.implementation.models.FieldValue} of type array
 * to a SDK level List of {@link FormField}.
 *
 * @param valueArray The array of field values returned by the service in the
 * {@link FieldValue}.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The List of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> convertedFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        // Array elements carry neither a label nor a field name of their own.
        convertedFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return convertedFields;
}
/**
 * Helper method to convert the page results to {@code FormPage form pages}.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param perPageTableList The per page tables list.
 * @param perPageLineList The per page form lines.
 *
 * @return The per page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        // Service unit string (e.g. pixel/inch) is mapped onto the SDK DimensionUnit enum.
        DimensionUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        new IterableStream<FormLine>(perPageLineList),
        new IterableStream<FormTable>(perPageTableList)
    );
}
/**
 * Helper method to build the {@link RecognizedForm} field map from the key/value pairs returned by the
 * service.
 *
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the {@link RecognizedForm}, keyed by synthetic name "field-&lt;index&gt;".
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        // Reference elements default to empty streams and are only resolved on request.
        IterableStream<FormContent> formKeyContentList = new IterableStream<>(Collections.emptyList());
        IterableStream<FormContent> formValueContentList = new IterableStream<>(Collections.emptyList());
        if (includeTextDetails) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
        }
        // Key/value pairs have no stable names, so fields are numbered in service order.
        String fieldName = "field-" + index;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
 * Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
 * true.
 *
 * @param elements The element reference strings returned by the service; each encodes the indices of a
 * read result, a line, and optionally a word (e.g. two or three numbers embedded in the string).
 * @param readResults The text extraction result the indices point into.
 * @param pageNumber The 1 based page number the referencing field lives on.
 *
 * @return The list of referenced elements; empty when the service supplied none.
 *
 * @throws RuntimeException when a reference string contains fewer than two indices.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new IterableStream<>(Collections.emptyList());
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Strip every non-digit to recover the numeric indices embedded in the reference string.
        // NOTE(review): assumes references encode (readResult, line[, word]) in that order — confirm
        // against the service's element reference format.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices: the reference points at a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            // Two indices: the reference points at a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
 * Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The list of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
 * Helper method to convert the service level modeled eight numbers representing the four points to SDK level
 * {@link BoundingBox}.
 *
 * @param serviceBoundingBox A list of eight numbers representing the four points of a box.
 *
 * @return A {@link BoundingBox}, or {@code null} when the input is empty or has an odd length.
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    // Consecutive (x, y) pairs become Points; the even-length guard above makes i + 1 safe.
    List<Point> pointList = new ArrayList<>();
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(new IterableStream<Point>(pointList));
}
} | class Transforms {
// Shared logger for surfacing mapping failures as exceptions.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Strips everything except digits; used to pull numeric indices out of element reference strings.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence reported when the service omits a confidence score.
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Static utility class; never instantiated.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
 *
 * @param analyzeResult The service returned result for analyze receipts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The IterableStream of {@code RecognizedReceipt}.
 */
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    return new IterableStream<>(
        toRecognizedForm(analyzeResult, includeTextDetails).stream()
            // Receipt locale is reported as en-US for every receipt at this code path.
            .map(recognizedForm ->
                new RecognizedReceipt("en-US", recognizedForm))
            .collect(Collectors.toList()));
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
 *
 * @param analyzeResult The service returned result for analyze layouts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The list of {@code FormPage}.
 */
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // Evaluated once before the loop: the null/empty state of pageResults cannot change mid-iteration.
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        // Tables are only available when the service returned page-level results.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        // Form lines are only populated when the caller asked for text details.
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }));
    return formPages;
}
/**
 * Iterates the given iterable, supplying each element together with its zero-based position.
 *
 * @param iterable the values to iterate over.
 * @param biConsumer invoked once per element with the element index and the element itself.
 * @param <T> the type of items being iterated.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index++, element);
    }
}
/**
 * Helper method to get per-page table information.
 *
 * @param pageResultItem The extracted page level information returned by the service.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The list of per page {@code FormTable}.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    return pageResultItem.getTables().stream()
        .map(dataTable ->
            new FormTable(dataTable.getRows(), dataTable.getColumns(),
                new IterableStream<>(dataTable.getCells().stream()
                    .map(dataTableCell -> new FormTableCell(
                        dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                        dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                        dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                        dataTableCell.getConfidence(),
                        // header/footer flags may be absent from the wire payload; treat absent as false
                        dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                        dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                        pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
                    .collect(Collectors.toList()))))
        .collect(Collectors.toList());
}
/**
 * Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    return readResultItem.getLines().stream()
        .map(textLine -> new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            // toWords already returns an IterableStream<FormWord>, so no extra wrapping is needed.
            toWords(textLine.getWords(), readResultItem.getPage())))
        .collect(Collectors.toList());
}
/**
 * The field map returned on analyze with an unlabeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The {@code RecognizedForm} field name to {@link FormField} map.
 */
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        // The field name itself acts as the label; it carries no bounding box of its own.
        FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
        Integer pageNumber = fieldValue.getPage();
        IterableStream<FormContent> formContentList = null;
        if (includeTextDetails) {
            formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
 * Helper method that converts the incoming service field value to one of the strongly typed SDK level
 * {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
 *
 * @param labelText The label text of the field.
 * @param key The name of the field.
 * @param fieldValue The named field values returned by the service.
 * @param valueText The value text of the field.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 *
 * @throws RuntimeException when the service returns a field value type this SDK version does not know.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Arrays have no label/value text of their own; elements are converted recursively.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            // Nested objects are converted recursively into a map of FormField values.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
 * Helper method to set default confidence value if confidence returned by service is null.
 *
 * @param confidence the confidence returned by service, possibly {@code null}.
 *
 * @return the field confidence value; {@code DEFAULT_CONFIDENCE_VALUE} (1.0) when the service omitted it.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
 * Helper method to convert the service returned
 * {@link com.azure.ai.formrecognizer.implementation.models.FieldValue} of type object
 * to a SDK level map of {@link FormField}.
 *
 * @param valueObject The map of named field values returned by the service in the
 * {@link FieldValue}.
 * @param pageNumber The 1-based page number the parent field lives on.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The Map of {@link FormField}.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    // Each nested value is converted recursively via setFormField; the label is dropped (null)
    // because the map key already names the nested field.
    valueObject.forEach((key, fieldValue) ->
        fieldValueObjectMap.put(key, setFormField(null, key, fieldValue,
            new FieldText(fieldValue.getText(),
                toBoundingBox(fieldValue.getBoundingBox()),
                fieldValue.getPage(),
                setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
            ), fieldValue.getPage(), readResults)));
    return fieldValueObjectMap;
}
/**
 * Helper method to convert the service returned
 * {@link com.azure.ai.formrecognizer.implementation.models.FieldValue} of type array
 * to a SDK level List of {@link FormField}.
 *
 * @param valueArray The array of field values returned by the service in the
 * {@link FieldValue}.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The List of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    return valueArray.stream()
        // Array elements carry neither a label nor a field name of their own.
        .map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults))
        .collect(Collectors.toList());
}
/**
 * Helper method to convert the page results to {@code FormPage form pages}.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param perPageTableList The per page tables list.
 * @param perPageLineList The per page form lines.
 *
 * @return The per page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        // Service unit string (e.g. pixel/inch) is mapped onto the SDK DimensionUnit enum.
        DimensionUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        new IterableStream<FormLine>(perPageLineList),
        new IterableStream<FormTable>(perPageTableList)
    );
}
/**
 * Helper method to build the {@link RecognizedForm} field map from the key/value pairs returned by the
 * service.
 *
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the {@link RecognizedForm}, keyed by synthetic name "field-&lt;index&gt;".
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        // Reference elements stay null unless the caller asked for text details.
        IterableStream<FormContent> formKeyContentList = null;
        IterableStream<FormContent> formValueContentList = null;
        if (includeTextDetails) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        // Key/value pairs have no stable names, so fields are numbered in service order.
        String fieldName = "field-" + index;
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
 * Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
 * true.
 *
 * @param elements The element reference strings returned by the service; each encodes the indices of a
 * read result, a line, and optionally a word.
 * @param readResults The text extraction result the indices point into.
 * @param pageNumber The 1 based page number the referencing field lives on.
 *
 * @return The list of referenced elements.
 *
 * @throws RuntimeException when a reference string contains fewer than two indices.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        // NOTE(review): IterableStream.of(null) — verify this yields a usable empty stream rather than
        // null; an earlier revision returned new IterableStream<>(Collections.emptyList()) here.
        return IterableStream.of(null);
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Strip every non-digit to recover the numeric indices embedded in the reference string.
        // NOTE(review): assumes references encode (readResult, line[, word]) in that order — confirm
        // against the service's element reference format.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices: the reference points at a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            // Two indices: the reference points at a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
 * Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The list of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    return new IterableStream<>(words.stream()
        .map(textWord -> new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            // Missing word confidence falls back to the default (1.0).
            setDefaultConfidenceValue(textWord.getConfidence()))
        ).collect(Collectors.toList()));
}
/**
 * Helper method to convert the service level modeled eight numbers representing the four points to SDK level
 * {@link BoundingBox}.
 *
 * @param serviceBoundingBox A list of eight numbers representing the four points of a box.
 *
 * @return A {@link BoundingBox}, or {@code null} when the input is empty or has an odd length.
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    // Consecutive (x, y) pairs become Points; the even-length guard above makes i + 1 safe.
    List<Point> pointList = new ArrayList<>();
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(pointList);
}
} |
The logic here shows that you can't have both labeled and unlabeled, can you change this into an `if / else if`. Once that is done you can remove the check below for `CoreUtils.isNullOrEmpty(documentResults)`. | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
// Kept as an (empty) list rather than null so callers such as toReceipt can always stream it.
List<RecognizedForm> extractedFormList = new ArrayList<>();
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
// documentResults and pageResults are mutually exclusive per analysis: the old code's inner
// isNullOrEmpty(documentResults) check meant the second loop only ever added forms when the
// first branch was skipped, so an if / else-if expresses the same logic directly.
if (!CoreUtils.isNullOrEmpty(documentResults)) {
    for (DocumentResult documentResultItem : documentResults) {
        // Service returns [startPage, endPage]; anything else falls back to page 1.
        PageRange pageRange;
        List<Integer> documentPageRange = documentResultItem.getPageRange();
        if (documentPageRange.size() == 2) {
            pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
        } else {
            pageRange = new PageRange(1, 1);
        }
        Map<String, FormField<?>> extractedFieldMap =
            getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
        extractedFormList.add(new RecognizedForm(
            extractedFieldMap,
            documentResultItem.getDocType(),
            pageRange,
            new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
                pageRange.getEndPageNumber()))));
    }
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
    for (PageResult pageResultItem : pageResults) {
        int pageNumber = pageResultItem.getPage();
        // Build the form type per page. The old shared AtomicReference accumulated cluster
        // ids across iterations ("form-0", "form-01", ...), mislabeling later pages.
        String formType = "form-";
        Integer clusterId = pageResultItem.getClusterId();
        if (clusterId != null) {
            formType += clusterId;
        }
        Map<String, FormField<?>> extractedFieldMap =
            getLabeledFieldMap(includeTextDetails, readResults, pageResultItem, pageNumber);
        extractedFormList.add(new RecognizedForm(
            extractedFieldMap,
            formType,
            new PageRange(pageNumber, pageNumber),
            new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
    }
}
return extractedFormList;
} | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
// NOTE(review): stays null when neither result set is present — callers that stream the
// result (e.g. toReceipt) would NPE; confirm null is intended here or return an empty list.
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
// documentResults and pageResults are mutually exclusive, hence the if / else-if.
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
// Service returns [startPage, endPage]; anything else falls back to page 1.
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
// Attach only the layout pages covered by this document's page range.
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
// Fresh per-page buffer so cluster ids do not accumulate across iterations.
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    // Wrap every recognized form in a receipt. NOTE: the locale is hard-coded to "en-US".
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // Loop-invariant: evaluate once instead of re-checking inside every page's lambda.
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        // Tables only exist when the service produced page results; read results and
        // page results are index-aligned per page.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        // Lines are only materialized when the caller asked for text details.
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }));
    return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // A plain counter loop avoids the single-element-array trick the lambda form needs
    // to mutate captured state.
    int position = 0;
    for (T element : iterable) {
        biConsumer.accept(position++, element);
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    // Map each service table to an SDK FormTable, converting every cell along the way.
    return pageResultItem.getTables().stream()
        .map(dataTable -> new FormTable(dataTable.getRows(), dataTable.getColumns(),
            new IterableStream<>(dataTable.getCells().stream()
                .map(dataTableCell -> new FormTableCell(
                    dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                    dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                    dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                    dataTableCell.getConfidence(),
                    dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                    dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                    pageNumber,
                    setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
                .collect(Collectors.toList()))))
        .collect(Collectors.toList());
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // toWords(...) already returns an IterableStream<FormWord>; the previous extra
    // new IterableStream<>(...) wrap around it was redundant.
    return readResultItem.getLines().stream()
        .map(textLine -> new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            toWords(textLine.getWords(), readResultItem.getPage())))
        .collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    // TreeMap keeps the field names in sorted order.
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        Integer pageNumber = fieldValue.getPage();
        // Reference elements are only resolved when the caller asked for text details.
        IterableStream<FormContent> formContentList = includeTextDetails
            ? setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
            : null;
        FieldText labelText = new FieldText(key, null, pageNumber, null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    // Dispatch on the service value type; each branch extracts the strongly typed value.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Array fields carry no label/value text, but should still report the service
            // confidence (defaulted when absent) like every other type; previously this
            // branch passed a null confidence.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // Null from the service means "no score reported"; substitute the default instead.
    if (confidence != null) {
        return confidence;
    }
    return DEFAULT_CONFIDENCE_VALUE;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    valueObject.forEach((key, fieldValue) -> {
        // Nested object members keep their own page and referenced OCR elements.
        FieldText valueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, valueText, fieldValue.getPage(), readResults));
    });
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> formFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        // Array elements have no field name, label, or value text of their own.
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // Translate the service unit enum via its string form.
    DimensionUnit unit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        unit,
        readResultItem.getWidth(),
        new IterableStream<>(perPageLineList),
        new IterableStream<>(perPageTableList));
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    forEachWithIndex(pageResultItem.getKeyValuePairs(), ((index, keyValuePair) -> {
        // Default to empty reference-element streams; they are only resolved on request.
        IterableStream<FormContent> keyElements = new IterableStream<>(Collections.emptyList());
        IterableStream<FormContent> valueElements = new IterableStream<>(Collections.emptyList());
        if (includeTextDetails) {
            keyElements = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            valueElements = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
        }
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyElements);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueElements);
        // Unlabeled training has no field names; synthesize positional ones.
        String fieldName = "field-" + index;
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
List<ReadResult> readResults, Integer pageNumber) {
// No references to resolve: return an empty stream rather than null.
if (CoreUtils.isNullOrEmpty(elements)) {
return new IterableStream<>(Collections.emptyList());
}
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
// Strip everything but digits to recover the numeric path indices from the element
// reference string: [readResultIndex, lineIndex, (optional) wordIndex].
String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
if (indices.length < 2) {
throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
+ "for the field value."));
}
int readResultIndex = Integer.parseInt(indices[0]);
int lineIndex = Integer.parseInt(indices[1]);
// Three indices point at a single word; two indices point at a whole line.
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord =
readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()));
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    // Convert each service word, defaulting a missing confidence score.
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // The service encodes a box as a flat list of coordinates: [x0, y0, x1, y1, ...].
    // Reject null/empty input and odd-length lists that cannot form (x, y) pairs.
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>();
    // Step by 2 per pair; the original incremented the index again inside the body
    // (get(++i)), which works but obscures the pair-wise traversal.
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(new IterableStream<Point>(pointList));
}
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    // Wrap each recognized form in a receipt. NOTE: the locale is hard-coded to "en-US".
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm form : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", form));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // Checked once up front; the per-page lambda below reuses the result.
    boolean hasPageResults = !CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, (index, readResultItem) -> {
        // Tables exist only when the service produced page results (index-aligned per page).
        List<FormTable> tablesForPage = new ArrayList<>();
        if (hasPageResults) {
            PageResult pageResultItem = pageResults.get(index);
            tablesForPage = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        // Lines are only materialized when text details were requested.
        List<FormLine> linesForPage = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            linesForPage = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, tablesForPage, linesForPage));
    });
    return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // Plain indexed iteration; no mutable-capture workaround required.
    int index = 0;
    for (T item : iterable) {
        biConsumer.accept(index, item);
        index++;
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    // Convert each service table (and its cells) into the SDK's FormTable model.
    List<FormTable> extractedTablesList = new ArrayList<>();
    pageResultItem.getTables().forEach(dataTable -> {
        List<FormTableCell> cells = new ArrayList<>();
        dataTable.getCells().forEach(dataTableCell -> cells.add(new FormTableCell(
            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
            dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
            dataTableCell.getConfidence(),
            dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
            dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
            pageNumber,
            setReferenceElements(dataTableCell.getElements(), readResults, pageNumber))));
        extractedTablesList.add(new FormTable(dataTable.getRows(), dataTable.getColumns(),
            new IterableStream<>(cells)));
    });
    return extractedTablesList;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // Convert every OCR line on this page, including its constituent words.
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            toWords(textLine.getWords(), readResultItem.getPage())));
    }
    return formLines;
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    // TreeMap keeps the field names in sorted order.
    Map<String, FormField<?>> fieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((fieldName, fieldValue) -> {
        Integer pageNumber = fieldValue.getPage();
        FieldText labelText = new FieldText(fieldName, null, pageNumber, null);
        IterableStream<FormContent> referenceElements = null;
        if (includeTextDetails) {
            // Only resolve the referenced OCR elements when text details were requested.
            referenceElements = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, referenceElements);
        fieldMap.put(fieldName, setFormField(labelText, fieldName, fieldValue, valueText, pageNumber, readResults));
    });
    return fieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    // Every supported type reports the same defaulted confidence; compute it once.
    float confidence = setDefaultConfidenceValue(fieldValue.getConfidence());
    // Dispatch on the service value type and extract the strongly typed value.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            return new FormField<>(confidence, labelText, key, fieldValue.getValuePhoneNumber(), valueText,
                pageNumber);
        case STRING:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueString(), valueText,
                pageNumber);
        case TIME:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueTime(), valueText,
                pageNumber);
        case DATE:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueDate(), valueText,
                pageNumber);
        case INTEGER:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueInteger(), valueText,
                pageNumber);
        case NUMBER:
            return new FormField<Number>(confidence, labelText, key, fieldValue.getValueNumber(), valueText,
                pageNumber);
        case ARRAY:
            // Array fields carry no label or value text of their own.
            return new FormField<>(confidence, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
        case OBJECT:
            return new FormField<>(confidence, labelText, key,
                toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText, pageNumber);
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // Null from the service means "no score reported"; substitute the default instead.
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    valueObject.forEach((key, fieldValue) -> {
        // Nested object members keep their own page and referenced OCR elements.
        FieldText memberValueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()),
            fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, memberValueText, fieldValue.getPage(), readResults));
    });
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> converted = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        // Array elements have no field name, label, or value text of their own.
        converted.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return converted;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // Translate the service unit enum via its string form.
    DimensionUnit dimensionUnit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        dimensionUnit,
        readResultItem.getWidth(),
        new IterableStream<>(perPageLineList),
        new IterableStream<>(perPageTableList));
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, (index, keyValuePair) -> {
        // Reference elements stay null unless text details were requested.
        IterableStream<FormContent> keyElements = null;
        IterableStream<FormContent> valueElements = null;
        if (includeTextDetails) {
            keyElements = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            valueElements = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyElements);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueElements);
        // Unlabeled training has no field names; synthesize positional ones.
        String fieldName = "field-" + index;
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    });
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
List<ReadResult> readResults, Integer pageNumber) {
if (CoreUtils.isNullOrEmpty(elements)) {
// NOTE(review): relies on IterableStream.of tolerating a null iterable and yielding an
// empty stream — confirm; the earlier revision returned new IterableStream<>(emptyList()).
return IterableStream.of(null);
}
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
// Strip everything but digits to recover the numeric path indices from the element
// reference string: [readResultIndex, lineIndex, (optional) wordIndex].
String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
if (indices.length < 2) {
throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
+ "for the field value."));
}
int readResultIndex = Integer.parseInt(indices[0]);
int lineIndex = Integer.parseInt(indices[1]);
// Three indices point at a single word; two indices point at a whole line.
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord =
readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()));
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
 * Helper method to convert the service level {@link TextWord} elements to the SDK level model
 * {@link FormWord}.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which these word elements exist.
 *
 * @return The stream of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
 * Helper method to convert the service's flat list of coordinates (x/y pairs describing the four
 * points of a box) to the SDK level {@link BoundingBox}.
 *
 * @param serviceBoundingBox A list of numbers representing the points of a box, in x/y pairs.
 *
 * @return A {@link BoundingBox}, or {@code null} when the input is absent or its length is odd
 * (i.e. not made of complete pairs).
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    // Consume the flat coordinate list two entries at a time: (x, y) per point.
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(pointList);
}
} | |
Could you try in PLAYBACK whether these are now OK? | public void canPostDeploymentWhatIfOnResourceGroup() throws Exception {
final String dpName = "dpA" + testId;
resourceClient.deployments()
.define(dpName)
.withExistingResourceGroup(rgName)
.withTemplateLink(templateUri, contentVersion)
.withParametersLink(parametersUri, contentVersion)
.withMode(DeploymentMode.COMPLETE)
.create();
PagedIterable<Deployment> deployments = resourceClient.deployments().listByResourceGroup(rgName);
boolean found = false;
for (Deployment deployment : deployments) {
if (deployment.name().equals(dpName)) {
found = true;
}
}
Assertions.assertTrue(found);
Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dpName);
Assertions.assertNotNull(deployment);
Assertions.assertEquals("Succeeded", deployment.provisioningState());
WhatIfOperationResult result = deployment.prepareWhatIf()
.withIncrementalMode()
.withWhatIfTemplateLink(templateUri, contentVersion)
.whatIf();
Assertions.assertEquals("Succeeded", result.status());
resourceClient.genericResources().delete(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15");
} | public void canPostDeploymentWhatIfOnResourceGroup() throws Exception {
final String dpName = "dpA" + testId;
resourceClient.deployments()
.define(dpName)
.withExistingResourceGroup(rgName)
.withTemplateLink(templateUri, contentVersion)
.withParametersLink(parametersUri, contentVersion)
.withMode(DeploymentMode.COMPLETE)
.create();
PagedIterable<Deployment> deployments = resourceClient.deployments().listByResourceGroup(rgName);
boolean found = false;
for (Deployment deployment : deployments) {
if (deployment.name().equals(dpName)) {
found = true;
}
}
Assertions.assertTrue(found);
Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dpName);
Assertions.assertNotNull(deployment);
Assertions.assertEquals("Succeeded", deployment.provisioningState());
WhatIfOperationResult result = deployment.prepareWhatIf()
.withIncrementalMode()
.withWhatIfTemplateLink(templateUri, contentVersion)
.whatIf();
Assertions.assertEquals("Succeeded", result.status());
Assertions.assertEquals(3, result.changes().size());
resourceClient.genericResources().delete(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15");
} | class DeploymentsTests extends ResourceManagerTestBase {
private ResourceGroups resourceGroups;
private ResourceGroup resourceGroup;
private String testId;
private String rgName;
private static String templateUri = "https:
private static String blankTemplateUri = "https:
private static String parametersUri = "https:
private static String updateTemplate = "{\"$schema\":\"https:
private static String updateParameters = "{\"vnetAddressPrefix\":{\"value\":\"10.0.0.0/16\"},\"subnet1Name\":{\"value\":\"Subnet1\"},\"subnet1Prefix\":{\"value\":\"10.0.0.0/24\"}}";
private static String contentVersion = "1.0.0.0";
// Creates the test clients and provisions the resource group shared by all tests in this class.
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) {
    super.initializeClients(restClient, defaultSubscription, domain);
    // Random suffix keeps resource names unique across test runs.
    testId = sdkContext.randomResourceName("", 9);
    resourceGroups = resourceClient.resourceGroups();
    rgName = "rg" + testId;
    resourceGroup = resourceGroups.define(rgName)
        .withRegion(Region.US_SOUTH_CENTRAL)
        .create();
}
// Tears down the shared resource group; the begin* variant returns without waiting for the
// deletion to complete.
@Override
protected void cleanUpResources() {
    resourceGroups.beginDeleteByName(rgName);
}
@Test
public void canDeployVirtualNetwork() throws Exception {
    // Deploy a template (template + parameters referenced by URI) into the resource group.
    final String dpName = "dpA" + testId;
    resourceClient.deployments()
        .define(dpName)
        .withExistingResourceGroup(rgName)
        .withTemplateLink(templateUri, contentVersion)
        .withParametersLink(parametersUri, contentVersion)
        .withMode(DeploymentMode.COMPLETE)
        .create();
    // The new deployment must show up when listing deployments for the resource group.
    PagedIterable<Deployment> deployments = resourceClient.deployments().listByResourceGroup(rgName);
    boolean found = false;
    for (Deployment deployment : deployments) {
        if (deployment.name().equals(dpName)) {
            found = true;
        }
    }
    Assertions.assertTrue(found);
    Assertions.assertTrue(resourceClient.deployments().checkExistence(rgName, dpName));
    // Point-read the deployment and verify it provisioned successfully.
    Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dpName);
    Assertions.assertNotNull(deployment);
    Assertions.assertEquals("Succeeded", deployment.provisioningState());
    // The template creates a virtual network named VNet1 — confirm it exists as a generic resource.
    GenericResource generic = resourceClient.genericResources().get(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15");
    Assertions.assertNotNull(generic);
    // Exported templates (deployment- and group-level) must be retrievable.
    Assertions.assertNotNull(deployment.exportTemplate().templateAsJson());
    Assertions.assertNotNull(resourceGroup.exportTemplate(ResourceGroupExportTemplateOptions.INCLUDE_BOTH));
    // NOTE(review): the expected operation count of 4 is template-specific — verify if the template changes.
    PagedIterable<DeploymentOperation> operations = deployment.deploymentOperations().list();
    Assertions.assertEquals(4, TestUtilities.getSize(operations));
    DeploymentOperation op = deployment.deploymentOperations().getById(operations.iterator().next().operationId());
    Assertions.assertNotNull(op);
    // Clean up the virtual network created by the deployment.
    resourceClient.genericResources().delete(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15");
}
// Fix: the annotation was duplicated (`@Test` twice). @Test is not a repeatable annotation, so the
// duplicate is a compile error; exactly one occurrence is kept.
@Test
public void canPostDeploymentWhatIfOnSubscription() throws Exception {
    // Deploy a template into the resource group so there is a real deployment to run what-if against.
    final String dpName = "dpA" + testId;
    resourceClient.deployments()
        .define(dpName)
        .withExistingResourceGroup(rgName)
        .withTemplateLink(templateUri, contentVersion)
        .withParametersLink(parametersUri, contentVersion)
        .withMode(DeploymentMode.COMPLETE)
        .create();
    // The new deployment must show up when listing deployments for the resource group.
    PagedIterable<Deployment> deployments = resourceClient.deployments().listByResourceGroup(rgName);
    boolean found = false;
    for (Deployment deployment : deployments) {
        if (deployment.name().equals(dpName)) {
            found = true;
        }
    }
    Assertions.assertTrue(found);
    Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dpName);
    Assertions.assertNotNull(deployment);
    Assertions.assertEquals("Succeeded", deployment.provisioningState());
    // Run a subscription-scoped what-if with a blank template and verify the operation succeeds.
    WhatIfOperationResult result = deployment.prepareWhatIf()
        .withLocation("westus")
        .withIncrementalMode()
        .withWhatIfTemplateLink(blankTemplateUri, contentVersion)
        .whatIfAtSubscriptionScope();
    Assertions.assertEquals("Succeeded", result.status());
    // Clean up the virtual network created by the deployment.
    resourceClient.genericResources().delete(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15");
}
@Test
@Disabled("deployment.cancel() doesn't throw but provisining state says Running not Cancelled...")
public void canCancelVirtualNetworkDeployment() throws Exception {
    final String dp = "dpB" + testId;
    // Start (but do not await) the deployment so there is something in-flight to cancel.
    resourceClient.deployments()
        .define(dp)
        .withExistingResourceGroup(rgName)
        .withTemplateLink(templateUri, contentVersion)
        .withParametersLink(parametersUri, contentVersion)
        .withMode(DeploymentMode.COMPLETE)
        .beginCreate();
    Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
    Assertions.assertEquals(dp, deployment.name());
    deployment.cancel();
    // After cancellation the deployment should report Canceled and VNet1 must not exist.
    deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
    Assertions.assertEquals("Canceled", deployment.provisioningState());
    Assertions.assertFalse(resourceClient.genericResources().checkExistence(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15"));
}
@Test
public void canUpdateVirtualNetworkDeployment() throws Exception {
    final String dp = "dpC" + testId;
    // Begin a COMPLETE-mode deployment (not awaited), then cancel it so it can be updated.
    Deployment createdDeployment = resourceClient.deployments()
        .define(dp)
        .withExistingResourceGroup(rgName)
        .withTemplateLink(templateUri, contentVersion)
        .withParametersLink(parametersUri, contentVersion)
        .withMode(DeploymentMode.COMPLETE)
        .beginCreate();
    Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
    Assertions.assertEquals(createdDeployment.correlationId(), deployment.correlationId());
    Assertions.assertEquals(dp, deployment.name());
    deployment.cancel();
    deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
    Assertions.assertEquals("Canceled", deployment.provisioningState());
    // Re-run the deployment with inline template/parameters in INCREMENTAL mode.
    deployment.update()
        .withTemplate(updateTemplate)
        .withParameters(updateParameters)
        .withMode(DeploymentMode.INCREMENTAL)
        .apply();
    deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
    Assertions.assertEquals(DeploymentMode.INCREMENTAL, deployment.mode());
    Assertions.assertEquals("Succeeded", deployment.provisioningState());
    // The updated template creates VNet2 — confirm it exists, then clean it up.
    GenericResource genericVnet = resourceClient.genericResources().get(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet2", "2015-06-15");
    Assertions.assertNotNull(genericVnet);
    resourceClient.genericResources().delete(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet2", "2015-06-15");
}
} | class DeploymentsTests extends ResourceManagerTestBase {
private ResourceGroups resourceGroups;
private ResourceGroup resourceGroup;
private String testId;
private String rgName;
private static String templateUri = "https:
private static String blankTemplateUri = "https:
private static String parametersUri = "https:
private static String updateTemplate = "{\"$schema\":\"https:
private static String updateParameters = "{\"vnetAddressPrefix\":{\"value\":\"10.0.0.0/16\"},\"subnet1Name\":{\"value\":\"Subnet1\"},\"subnet1Prefix\":{\"value\":\"10.0.0.0/24\"}}";
private static String contentVersion = "1.0.0.0";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) {
super.initializeClients(restClient, defaultSubscription, domain);
testId = sdkContext.randomResourceName("", 9);
resourceGroups = resourceClient.resourceGroups();
rgName = "rg" + testId;
resourceGroup = resourceGroups.define(rgName)
.withRegion(Region.US_SOUTH_CENTRAL)
.create();
}
@Override
protected void cleanUpResources() {
resourceGroups.beginDeleteByName(rgName);
}
@Test
public void canDeployVirtualNetwork() throws Exception {
final String dpName = "dpA" + testId;
resourceClient.deployments()
.define(dpName)
.withExistingResourceGroup(rgName)
.withTemplateLink(templateUri, contentVersion)
.withParametersLink(parametersUri, contentVersion)
.withMode(DeploymentMode.COMPLETE)
.create();
PagedIterable<Deployment> deployments = resourceClient.deployments().listByResourceGroup(rgName);
boolean found = false;
for (Deployment deployment : deployments) {
if (deployment.name().equals(dpName)) {
found = true;
}
}
Assertions.assertTrue(found);
Assertions.assertTrue(resourceClient.deployments().checkExistence(rgName, dpName));
Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dpName);
Assertions.assertNotNull(deployment);
Assertions.assertEquals("Succeeded", deployment.provisioningState());
GenericResource generic = resourceClient.genericResources().get(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15");
Assertions.assertNotNull(generic);
Assertions.assertNotNull(deployment.exportTemplate().templateAsJson());
Assertions.assertNotNull(resourceGroup.exportTemplate(ResourceGroupExportTemplateOptions.INCLUDE_BOTH));
PagedIterable<DeploymentOperation> operations = deployment.deploymentOperations().list();
Assertions.assertEquals(4, TestUtilities.getSize(operations));
DeploymentOperation op = deployment.deploymentOperations().getById(operations.iterator().next().operationId());
Assertions.assertNotNull(op);
resourceClient.genericResources().delete(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15");
}
@Test
@Test
public void canPostDeploymentWhatIfOnSubscription() throws Exception {
final String dpName = "dpA" + testId;
resourceClient.deployments()
.define(dpName)
.withExistingResourceGroup(rgName)
.withTemplateLink(templateUri, contentVersion)
.withParametersLink(parametersUri, contentVersion)
.withMode(DeploymentMode.COMPLETE)
.create();
PagedIterable<Deployment> deployments = resourceClient.deployments().listByResourceGroup(rgName);
boolean found = false;
for (Deployment deployment : deployments) {
if (deployment.name().equals(dpName)) {
found = true;
}
}
Assertions.assertTrue(found);
Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dpName);
Assertions.assertNotNull(deployment);
Assertions.assertEquals("Succeeded", deployment.provisioningState());
WhatIfOperationResult result = deployment.prepareWhatIf()
.withLocation("westus")
.withIncrementalMode()
.withWhatIfTemplateLink(blankTemplateUri, contentVersion)
.whatIfAtSubscriptionScope();
Assertions.assertEquals("Succeeded", result.status());
Assertions.assertEquals(0, result.changes().size());
resourceClient.genericResources().delete(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15");
}
@Test
@Disabled("deployment.cancel() doesn't throw but provisining state says Running not Cancelled...")
public void canCancelVirtualNetworkDeployment() throws Exception {
final String dp = "dpB" + testId;
resourceClient.deployments()
.define(dp)
.withExistingResourceGroup(rgName)
.withTemplateLink(templateUri, contentVersion)
.withParametersLink(parametersUri, contentVersion)
.withMode(DeploymentMode.COMPLETE)
.beginCreate();
Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
Assertions.assertEquals(dp, deployment.name());
deployment.cancel();
deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
Assertions.assertEquals("Canceled", deployment.provisioningState());
Assertions.assertFalse(resourceClient.genericResources().checkExistence(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet1", "2015-06-15"));
}
@Test
public void canUpdateVirtualNetworkDeployment() throws Exception {
final String dp = "dpC" + testId;
Deployment createdDeployment = resourceClient.deployments()
.define(dp)
.withExistingResourceGroup(rgName)
.withTemplateLink(templateUri, contentVersion)
.withParametersLink(parametersUri, contentVersion)
.withMode(DeploymentMode.COMPLETE)
.beginCreate();
Deployment deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
Assertions.assertEquals(createdDeployment.correlationId(), deployment.correlationId());
Assertions.assertEquals(dp, deployment.name());
deployment.cancel();
deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
Assertions.assertEquals("Canceled", deployment.provisioningState());
deployment.update()
.withTemplate(updateTemplate)
.withParameters(updateParameters)
.withMode(DeploymentMode.INCREMENTAL)
.apply();
deployment = resourceClient.deployments().getByResourceGroup(rgName, dp);
Assertions.assertEquals(DeploymentMode.INCREMENTAL, deployment.mode());
Assertions.assertEquals("Succeeded", deployment.provisioningState());
GenericResource genericVnet = resourceClient.genericResources().get(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet2", "2015-06-15");
Assertions.assertNotNull(genericVnet);
resourceClient.genericResources().delete(rgName, "Microsoft.Network", "", "virtualnetworks", "VNet2", "2015-06-15");
}
} | |
Could we make this into a variable outside of the scope of the for loop? Should only need to check this once. | static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(pageResults)) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
} | if (!CoreUtils.isNullOrEmpty(pageResults)) { | static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Private constructor: static utility class, not meant to be instantiated.
private Transforms() {
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
 *
 * @param analyzeResult The service returned result for analyze custom forms.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The list of {@code RecognizedForm}.
 */
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<RecognizedForm> extractedFormList = new ArrayList<>();
    Map<String, FormField<?>> extractedFieldMap;
    AtomicReference<PageRange> pageRange = new AtomicReference<>();
    // Default form type prefix; replaced by the document type below, or suffixed with a cluster id.
    AtomicReference<String> formType = new AtomicReference<>("form-");
    // Layout (pages with lines and tables) is always computed; forms reference slices of it.
    List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
    // When document-level results are present, build one recognized form per document.
    if (!CoreUtils.isNullOrEmpty(documentResults)) {
        for (DocumentResult documentResultItem : documentResults) {
            List<Integer> documentPageRange = documentResultItem.getPageRange();
            if (documentPageRange.size() == 2) {
                pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
            } else {
                // Fall back to a single-page range when the service did not return a [start, end] pair.
                pageRange.set(new PageRange(1, 1));
            }
            formType.set(documentResultItem.getDocType());
            extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
            extractedFormList.add(new RecognizedForm(
                extractedFieldMap,
                formType.get(),
                pageRange.get(),
                // Attach only the layout pages covered by this document's (1-based) page range.
                new IterableStream<>(formPages.subList(pageRange.get().getStartPageNumber() - 1,
                    pageRange.get().getEndPageNumber()))));
        }
    }
    if (!CoreUtils.isNullOrEmpty(pageResults)) {
        for (PageResult pageResultItem : pageResults) {
            int pageNumber = pageResultItem.getPage();
            // Page-level key/value pairs only become forms when no document results were returned.
            if (CoreUtils.isNullOrEmpty(documentResults)) {
                Integer clusterId = pageResultItem.getClusterId();
                if (clusterId != null) {
                    // NOTE(review): the cluster id is appended to the previous formType value, so it
                    // accumulates across iterations for multi-page results — verify this is intended.
                    formType.set(formType.get() + clusterId);
                }
                extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults, pageResultItem,
                    pageNumber);
                extractedFormList.add(new RecognizedForm(
                    extractedFieldMap,
                    formType.get(),
                    new PageRange(pageNumber, pageNumber),
                    new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
            }
        }
    }
    return extractedFormList;
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
 *
 * @param analyzeResult The service returned result for analyze receipts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The IterableStream of {@code RecognizedReceipt}.
 */
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        // The receipt locale is hard-coded to "en-US" by this transform.
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @return The IterableStream of {@code FormPage}.
*/
/**
 * Iterates over the given iterable, invoking the consumer with each element together with its
 * 0-based position.
 *
 * @param iterable The sequence to walk.
 * @param biConsumer The callback receiving the running index and the element at that index.
 * @param <T> The type of items being iterated.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index, element);
        index++;
    }
}
/**
 * Helper method to get per-page table information.
 *
 * @param pageResultItem The extracted page level information returned by the service.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The list of per page {@code FormTable}.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    List<FormTable> extractedTablesList = new ArrayList<>();
    pageResultItem.getTables().forEach(dataTable -> {
        // Convert every service table cell into the SDK cell model, defaulting the header/footer
        // flags to false when the service leaves them unset.
        IterableStream<FormTableCell> tableCellList = new IterableStream<>(dataTable.getCells().stream()
            .map(dataTableCell -> new FormTableCell(
                dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                dataTableCell.getConfidence(),
                dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                pageNumber,
                setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)
            ))
            .collect(Collectors.toList()));
        FormTable extractedTable = new FormTable(dataTable.getRows(), dataTable.getColumns(), tableCellList);
        extractedTablesList.add(extractedTable);
    });
    return extractedTablesList;
}
/**
 * Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage()))));
    }
    return formLines;
}
/**
 * The field map returned on analyze with an unlabeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The fields populated on the recognized form, keyed by field name.
 */
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        IterableStream<FormContent> formContentList = null;
        Integer pageNumber = fieldValue.getPage();
        // Reference elements (the words/lines backing this value) are only resolved on request.
        if (includeTextDetails) {
            formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        // The label carries the field name only; no bounding box or reference elements are set for it.
        FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
 * Helper method that converts the incoming service field value to one of the strongly typed SDK level
 * {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
 *
 * @param labelText The label text of the field.
 * @param key The name of the field.
 * @param fieldValue The named field values returned by the service.
 * @param valueText The value text of the field.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 *
 * @throws RuntimeException when the service returns a field value type this method does not support.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    // Dispatch on the service value type; each branch builds a FormField typed to that payload.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Arrays carry no confidence/label/value-text of their own; the nested values do.
            value = new FormField<>(null, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            // Objects recurse into a map of nested FormFields.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
 * Helper method that returns the service-provided confidence, substituting the default confidence
 * when the service returned none.
 *
 * @param confidence The confidence returned by the service, possibly {@code null}.
 *
 * @return The field confidence value to expose on the SDK model.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
 * Helper method to convert the service returned map of {@code FieldValue} items to a SDK level map
 * of {@link FormField}.
 *
 * @param valueObject The map of named field values returned by the service.
 * @param pageNumber The 1-based page number of the enclosing field.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The map of {@link FormField}, keyed by field name.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        String key = entry.getKey();
        FieldValue fieldValue = entry.getValue();
        FieldText valueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, valueText, fieldValue.getPage(), readResults));
    }
    return fieldValueObjectMap;
}
/**
 * Helper method to convert the service returned list of {@code FieldValue} items to a SDK level
 * list of {@link FormField}.
 *
 * @param valueArray The array of field values returned by the service.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The list of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> formFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        // Array members have no label/value-text of their own at this level.
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
 * Helper method to assemble the SDK-level {@code FormPage} for one page of service results.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param perPageTableList The tables recognized on the page.
 * @param perPageLineList The text lines recognized on the page.
 *
 * @return The per page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // Bridge the service's unit enum into the SDK's DimensionUnit via its string form.
    DimensionUnit dimensionUnit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        dimensionUnit,
        readResultItem.getWidth(),
        new IterableStream<FormLine>(perPageLineList),
        new IterableStream<FormTable>(perPageTableList));
}
/**
 * Helper method to set the fields on the recognized form using the page-level key/value pairs
 * returned by the service.
 *
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the recognized form, keyed by synthetic field name.
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        // Reference elements default to empty streams and are only resolved on request.
        IterableStream<FormContent> formKeyContentList = new IterableStream<>(Collections.emptyList());
        IterableStream<FormContent> formValueContentList = new IterableStream<>(Collections.emptyList());
        if (includeTextDetails) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
        }
        // Unlabeled pairs have no real names — synthesize "field-<index>" keys.
        String fieldName = "field-" + index;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
 * Resolves the service's element reference strings (e.g. "#/readResults/0/lines/1/words/2")
 * into the concrete {@link FormWord}/{@link FormLine} instances they point at. Only invoked
 * when {@code includeTextDetails} is set to true.
 *
 * @param elements The reference strings to resolve; may be null or empty.
 * @param readResults The per-page extraction results the references index into.
 * @param pageNumber The 1-based page number recorded on the produced elements.
 *
 * @return The list of referenced elements, empty when there are none.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new IterableStream<>(Collections.emptyList());
    }
    List<FormContent> referencedContent = new ArrayList<>();
    for (String elementString : elements) {
        // Keep only the numeric path segments: "#/readResults/0/lines/1" -> ["0", "1"].
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices identify a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            referencedContent.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber, setDefaultConfidenceValue(textWord.getConfidence())));
        } else {
            // Two indices identify a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            referencedContent.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber)));
        }
    }
    return new IterableStream<>(referencedContent);
}
/**
 * Converts the service level {@link TextWord} elements into SDK level {@link FormWord} models.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The stream of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>();
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
            pageNumber, setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
 * Converts the service's flat list of eight numbers (four x,y coordinate pairs) into an SDK
 * level {@link BoundingBox}.
 *
 * @param serviceBoundingBox A flat, even-length list of coordinates; consumed pairwise.
 *
 * @return A {@link BoundingBox}, or {@code null} when the input is absent or not pairwise.
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    // Presize: one Point per coordinate pair.
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    // Step the index by two per pair instead of mutating it inside the loop body
    // (the previous embedded "++i" was easy to misread and error-prone).
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(new IterableStream<Point>(pointList));
}
} | class Transforms {
// Logger used to surface conversion failures as logged exceptions.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Matches runs of non-digit characters; used to split service element references
// such as "#/readResults/0/lines/1/words/2" into their numeric indices.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence substituted when the service omits a confidence score.
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Utility class: private constructor prevents instantiation.
private Transforms() {
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model
 * {@link RecognizedForm}.
 *
 * @param analyzeResult The service returned result for analyze custom forms.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The list of {@code RecognizedForm}; empty (never {@code null}) when the service
 * returned neither document results nor page results.
 */
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    // Always return a list, never null: callers such as toReceipt immediately call
    // stream() on this result and would otherwise throw a NullPointerException.
    List<RecognizedForm> extractedFormList = new ArrayList<>();
    List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
    if (!CoreUtils.isNullOrEmpty(documentResults)) {
        // Labeled/trained model result: one RecognizedForm per document result.
        for (DocumentResult documentResultItem : documentResults) {
            PageRange pageRange;
            List<Integer> documentPageRange = documentResultItem.getPageRange();
            if (documentPageRange.size() == 2) {
                pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
            } else {
                // Fall back to a single-page range when the service shape is unexpected.
                pageRange = new PageRange(1, 1);
            }
            Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
                includeTextDetails);
            extractedFormList.add(new RecognizedForm(
                extractedFieldMap,
                documentResultItem.getDocType(),
                pageRange,
                new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
                    pageRange.getEndPageNumber()))));
        }
    } else if (!CoreUtils.isNullOrEmpty(pageResults)) {
        // Unlabeled model result: one RecognizedForm per page, typed "form-<clusterId>".
        for (PageResult pageResultItem : pageResults) {
            // StringBuilder: no synchronization needed for a method-local buffer
            // (replaces the legacy StringBuffer).
            StringBuilder formType = new StringBuilder("form-");
            int pageNumber = pageResultItem.getPage();
            Integer clusterId = pageResultItem.getClusterId();
            if (clusterId != null) {
                formType.append(clusterId);
            }
            Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
                pageResultItem, pageNumber);
            extractedFormList.add(new RecognizedForm(
                extractedFieldMap,
                formType.toString(),
                new PageRange(pageNumber, pageNumber),
                new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
        }
    }
    return extractedFormList;
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model
 * {@link RecognizedReceipt}, one receipt per recognized form, locale fixed to "en-US".
 *
 * @param analyzeResult The service returned result for analyze receipts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The IterableStream of {@code RecognizedReceipt}.
 */
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
/**
 * Iterates the given iterable in order, handing each element together with its zero-based
 * position to the supplied consumer.
 *
 * @param iterable the elements to iterate.
 * @param biConsumer the function receiving the index and the element.
 * @param <T> the element type.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    int position = 0;
    for (T element : iterable) {
        biConsumer.accept(position++, element);
    }
}
/**
 * Helper method to get per-page table information.
 *
 * @param pageResultItem The extracted page level information returned by the service.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number on which these tables exist.
 *
 * @return The list of per page {@code FormTable}.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    return pageResultItem.getTables().stream()
        .map(table -> {
            // Convert every service cell first, then wrap the page's cells into one table.
            IterableStream<FormTableCell> cells = new IterableStream<>(table.getCells().stream()
                .map(cell -> new FormTableCell(
                    cell.getRowIndex(), cell.getColumnIndex(),
                    cell.getRowSpan(), cell.getColumnSpan(),
                    cell.getText(), toBoundingBox(cell.getBoundingBox()),
                    cell.getConfidence(),
                    // Service may omit the header/footer flags; treat absent as false.
                    Boolean.TRUE.equals(cell.isHeader()),
                    Boolean.TRUE.equals(cell.isFooter()),
                    pageNumber, setReferenceElements(cell.getElements(), readResults, pageNumber)))
                .collect(Collectors.toList()));
            return new FormTable(table.getRows(), table.getColumns(), cells);
        })
        .collect(Collectors.toList());
}
/**
 * Helper method to convert the per page {@link ReadResult} item to {@link FormLine} models.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            toWords(textLine.getWords(), readResultItem.getPage())));
    }
    return formLines;
}
/**
 * Builds the field map for a result produced by an unlabeled model analysis: the service's
 * field keys become the labels, values are converted via {@code setFormField}.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The name-sorted field map for the {@code RecognizedForm}.
 */
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        Integer pageNumber = fieldValue.getPage();
        // Reference elements are only resolved when the caller asked for text details.
        IterableStream<FormContent> referencedElements = includeTextDetails
            ? setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
            : null;
        FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, referencedElements);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
 * Converts one service field value into the matching strongly typed SDK {@link FormField},
 * dispatching on the service-reported value type.
 *
 * @param labelText The label text of the field; null for array items and nested objects.
 * @param key The name of the field; null for array items.
 * @param fieldValue The named field value returned by the service.
 * @param valueText The value text of the field; null for array items.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 * @throws RuntimeException when the service reports a value type this SDK does not model.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            // Explicit <Number> type argument: the getter's return type alone would not
            // drive diamond inference to the intended boxed type.
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Array fields carry no label/value text; items are converted recursively.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            // Object fields become a map of nested FormFields, converted recursively.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
 * Returns the service-provided confidence, substituting {@code DEFAULT_CONFIDENCE_VALUE}
 * (1.0) when the service omitted the score.
 *
 * @param confidence the confidence returned by service, possibly {@code null}.
 *
 * @return the field confidence value, never absent.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
 * Converts the nested field values of an object-typed service {@link FieldValue} into a
 * name-sorted map of SDK level {@link FormField} instances.
 *
 * @param valueObject The nested field values returned by the service.
 * @param pageNumber The 1-based page number of the enclosing field, used to resolve
 * reference elements.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The Map of {@link FormField}.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> fieldEntry : valueObject.entrySet()) {
        String key = fieldEntry.getKey();
        FieldValue fieldValue = fieldEntry.getValue();
        // The FieldText reports the nested value's own page, while reference elements are
        // resolved against the enclosing field's page number.
        FieldText valueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()),
            fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, valueText, fieldValue.getPage(), readResults));
    }
    return fieldValueObjectMap;
}
/**
 * Converts each service {@link FieldValue} of an array-typed field into the SDK's strongly
 * typed {@link FormField}. Array items carry no label or value text of their own.
 *
 * @param valueArray The field values returned by the service for an array-typed field.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The List of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> formFields = new ArrayList<>();
    for (FieldValue fieldValue : valueArray) {
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
 * Assembles one SDK-level {@code FormPage} from the per-page extraction pieces.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param perPageTableList The tables recognized on this page.
 * @param perPageLineList The form lines recognized on this page.
 *
 * @return The per page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    IterableStream<FormLine> pageLines = new IterableStream<FormLine>(perPageLineList);
    IterableStream<FormTable> pageTables = new IterableStream<FormTable>(perPageTableList);
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        DimensionUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        pageLines,
        pageTables);
}
/**
 * Builds the field map for a page recognized with an unlabeled model: each service key/value
 * pair becomes a synthetic {@code "field-<index>"} entry.
 *
 * @param includeTextDetails Whether to resolve reference elements onto the field texts.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The populated, name-sorted field map.
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    for (int index = 0; index < keyValuePairs.size(); index++) {
        KeyValuePair keyValuePair = keyValuePairs.get(index);
        // Reference elements stay null unless text details were requested.
        IterableStream<FormContent> keyReferences = null;
        IterableStream<FormContent> valueReferences = null;
        if (includeTextDetails) {
            keyReferences = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            valueReferences = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyReferences);
        FieldText valueFieldText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueReferences);
        String fieldName = "field-" + index;
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueFieldText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }
    return formFieldMap;
}
/**
 * Resolves the service's element reference strings (e.g. "#/readResults/0/lines/1/words/2")
 * into the concrete {@link FormWord}/{@link FormLine} instances they point at. Only invoked
 * when {@code includeTextDetails} is set to true.
 *
 * @return The list of referenced elements.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        // NOTE(review): IterableStream.of(null) differs from the empty-list stream used by the
        // equivalent helper elsewhere in this codebase — confirm it yields an empty stream
        // rather than a null-wrapping one.
        return IterableStream.of(null);
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Keep only the numeric path segments: "#/readResults/0/lines/1" -> ["0", "1"].
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices identify a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            // Two indices identify a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
 * Converts the service level {@link TextWord} elements into SDK level {@link FormWord} models.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The stream of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>();
    for (TextWord textWord : words) {
        formWords.add(new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
 * Converts the service's flat list of eight numbers (four x,y coordinate pairs) into an SDK
 * level {@link BoundingBox}.
 *
 * @param serviceBoundingBox A flat, even-length list of coordinates; consumed pairwise.
 *
 * @return A {@link BoundingBox}, or {@code null} when the input is absent or not pairwise.
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    // Presize: one Point per coordinate pair.
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    // Step the index by two per pair instead of mutating it inside the loop body
    // (the previous embedded "++i" was easy to misread and error-prone).
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(pointList);
}
} |
This could be simplified a bit by using a stream, the logic inside of the lambda would be the same except adding it to the list. ```java return pageResultItem.getTables().stream() .map(/* convert to FormTable */) .collect(Collectors.toList()); ``` | static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
List<FormTable> extractedTablesList = new ArrayList<>();
pageResultItem.getTables().forEach(dataTable -> {
IterableStream<FormTableCell> tableCellList = new IterableStream<>(dataTable.getCells().stream()
.map(dataTableCell -> new FormTableCell(
dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(),
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
pageNumber,
setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)
))
.collect(Collectors.toList()));
FormTable extractedTable = new FormTable(dataTable.getRows(), dataTable.getColumns(), tableCellList);
extractedTablesList.add(extractedTable);
});
return extractedTablesList;
} | pageResultItem.getTables().forEach(dataTable -> { | static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
return pageResultItem.getTables().stream()
.map(dataTable ->
new FormTable(dataTable.getRows(), dataTable.getColumns(),
new IterableStream<>(dataTable.getCells().stream()
.map(dataTableCell -> new FormTableCell(
dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(),
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
.collect(Collectors.toList()))))
.collect(Collectors.toList());
} | class Transforms {
// Logger used to surface conversion failures as logged exceptions.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Matches runs of non-digit characters; used to split service element references
// such as "#/readResults/0/lines/1/words/2" into their numeric indices.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence substituted when the service omits a confidence score.
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Utility class: private constructor prevents instantiation.
private Transforms() {
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model
 * {@link RecognizedForm}.
 *
 * @param analyzeResult The service returned result for analyze custom forms.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The list of {@code RecognizedForm}; empty when the service returned neither
 * document results nor page results.
 */
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<RecognizedForm> extractedFormList = new ArrayList<>();
    List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
    if (!CoreUtils.isNullOrEmpty(documentResults)) {
        // Labeled/trained model result: one RecognizedForm per document result.
        for (DocumentResult documentResultItem : documentResults) {
            List<Integer> documentPageRange = documentResultItem.getPageRange();
            // Fall back to a single-page range when the service shape is unexpected.
            PageRange pageRange = documentPageRange.size() == 2
                ? new PageRange(documentPageRange.get(0), documentPageRange.get(1))
                : new PageRange(1, 1);
            Map<String, FormField<?>> extractedFieldMap =
                getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
            extractedFormList.add(new RecognizedForm(
                extractedFieldMap,
                documentResultItem.getDocType(),
                pageRange,
                new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
                    pageRange.getEndPageNumber()))));
        }
    } else if (!CoreUtils.isNullOrEmpty(pageResults)) {
        // Unlabeled model result: one RecognizedForm per page, typed "form-<clusterId>".
        for (PageResult pageResultItem : pageResults) {
            int pageNumber = pageResultItem.getPage();
            // Rebuild the form type per page. The previous implementation kept a single
            // AtomicReference across iterations and appended each page's cluster id to it,
            // so later pages received accumulated types like "form-01".
            String formType = "form-";
            Integer clusterId = pageResultItem.getClusterId();
            if (clusterId != null) {
                formType += clusterId;
            }
            Map<String, FormField<?>> extractedFieldMap =
                getLabeledFieldMap(includeTextDetails, readResults, pageResultItem, pageNumber);
            extractedFormList.add(new RecognizedForm(
                extractedFieldMap,
                formType,
                new PageRange(pageNumber, pageNumber),
                new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
        }
    }
    return extractedFormList;
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model
 * {@link RecognizedReceipt}, one receipt per recognized form, locale fixed to "en-US".
 *
 * @param analyzeResult The service returned result for analyze receipts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The IterableStream of {@code RecognizedReceipt}.
 */
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
 * Helper method to transform the service returned {@link AnalyzeResult} to SDK model
 * {@code FormPage} instances, one per read result.
 *
 * @param analyzeResult The service returned result for analyze layouts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The list of {@code FormPage}.
 */
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // Page results (when present) are aligned index-for-index with read results.
    for (int index = 0; index < readResults.size(); index++) {
        ReadResult readResultItem = readResults.get(index);
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!CoreUtils.isNullOrEmpty(pageResults)) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }
    return formPages;
}
/**
 * Iterates the given iterable in order, handing each element together with its zero-based
 * position to the supplied consumer.
 *
 * @param iterable the elements to iterate.
 * @param biConsumer the function receiving the index and the element.
 * @param <T> the element type.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    int position = 0;
    for (T element : iterable) {
        biConsumer.accept(position++, element);
    }
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
/**
 * Helper method to convert the per page {@link ReadResult} item to {@link FormLine} models.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage()))));
    }
    return formLines;
}
/**
 * Builds the field map for a result produced by an unlabeled model analysis: the service's
 * field keys become the labels, values are converted via {@code setFormField}.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The name-sorted field map for the {@code RecognizedForm}.
 */
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        Integer pageNumber = fieldValue.getPage();
        // Reference elements are only resolved when the caller asked for text details.
        IterableStream<FormContent> referencedElements = includeTextDetails
            ? setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
            : null;
        FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, referencedElements);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
 * Converts one service field value into the matching strongly typed SDK {@link FormField},
 * dispatching on the service-reported value type.
 *
 * @param labelText The label text of the field; null for array items and nested objects.
 * @param key The name of the field; null for array items.
 * @param fieldValue The named field value returned by the service.
 * @param valueText The value text of the field; null for array items.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 * @throws RuntimeException when the service reports a value type this SDK does not model.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            // Explicit <Number> type argument: the getter's return type alone would not
            // drive diamond inference to the intended boxed type.
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // NOTE(review): the ARRAY case passes a null confidence while every other case
            // applies setDefaultConfidenceValue — confirm whether this asymmetry is intended.
            value = new FormField<>(null, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            // Object fields become a map of nested FormFields, converted recursively.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
 * Returns the service-provided confidence, substituting {@code DEFAULT_CONFIDENCE_VALUE}
 * (1.0) when the service omitted the score.
 *
 * @param confidence the confidence returned by service, possibly {@code null}.
 *
 * @return the field confidence value, never absent.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
Integer pageNumber, List<ReadResult> readResults) {
Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) -> fieldValueObjectMap.put(key,
setFormField(null, key, fieldValue, new FieldText(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
setReferenceElements(fieldValue.getElements(), readResults, pageNumber)),
fieldValue.getPage(), readResults)));
return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
return valueArray.stream()
.map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults))
.collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList) {
return new FormPage(
readResultItem.getHeight(),
readResultItem.getAngle(),
DimensionUnit.fromString(readResultItem.getUnit().toString()),
readResultItem.getWidth(),
new IterableStream<FormLine>(perPageLineList),
new IterableStream<FormTable>(perPageTableList)
);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
List<ReadResult> readResults,
PageResult pageResultItem, Integer pageNumber) {
Map<String, FormField<?>> formFieldMap = new TreeMap<>();
List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
IterableStream<FormContent> formKeyContentList = new IterableStream<>(Collections.emptyList());
IterableStream<FormContent> formValueContentList = new IterableStream<>(Collections.emptyList());
if (includeTextDetails) {
formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
}
String fieldName = "field-" + index;
FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
formFieldMap.put(fieldName, formField);
}));
return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
List<ReadResult> readResults, Integer pageNumber) {
if (CoreUtils.isNullOrEmpty(elements)) {
return new IterableStream<>(Collections.emptyList());
}
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
if (indices.length < 2) {
throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
+ "for the field value."));
}
int readResultIndex = Integer.parseInt(indices[0]);
int lineIndex = Integer.parseInt(indices[1]);
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord =
readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()));
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
return new IterableStream<>(words.stream()
.map(textWord -> new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
setDefaultConfidenceValue(textWord.getConfidence())))
.collect(Collectors.toList()));
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new BoundingBox(new IterableStream<Point>(pointList));
}
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
return new IterableStream<>(
toRecognizedForm(analyzeResult, includeTextDetails).stream()
.map(recognizedForm ->
new RecognizedReceipt("en-US", recognizedForm))
.collect(Collectors.toList()));
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
int[] index = new int[]{0};
iterable.forEach(element -> biConsumer.accept(index[0]++, element));
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
return readResultItem.getLines().stream()
.map(textLine -> new FormLine(
textLine.getText(),
toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(),
toWords(textLine.getWords(), readResultItem.getPage())))
.collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
Integer pageNumber = fieldValue.getPage();
IterableStream<FormContent> formContentList = null;
if (includeTextDetails) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
FormField<?> value;
switch (fieldValue.getType()) {
case PHONE_NUMBER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
break;
case STRING:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueString(), valueText, pageNumber);
break;
case TIME:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueTime(), valueText, pageNumber);
break;
case DATE:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueDate(), valueText, pageNumber);
break;
case INTEGER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueInteger(), valueText, pageNumber);
break;
case NUMBER:
value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueNumber(), valueText, pageNumber);
break;
case ARRAY:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
break;
case OBJECT:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
pageNumber);
break;
default:
throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
}
return value;
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
Integer pageNumber, List<ReadResult> readResults) {
Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) ->
fieldValueObjectMap.put(key, setFormField(null, key, fieldValue,
new FieldText(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()),
fieldValue.getPage(),
setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
), fieldValue.getPage(), readResults)));
return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
return valueArray.stream()
.map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults))
.collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList) {
return new FormPage(
readResultItem.getHeight(),
readResultItem.getAngle(),
DimensionUnit.fromString(readResultItem.getUnit().toString()),
readResultItem.getWidth(),
new IterableStream<FormLine>(perPageLineList),
new IterableStream<FormTable>(perPageTableList)
);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
List<ReadResult> readResults,
PageResult pageResultItem, Integer pageNumber) {
Map<String, FormField<?>> formFieldMap = new TreeMap<>();
List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
IterableStream<FormContent> formKeyContentList = null;
IterableStream<FormContent> formValueContentList = null;
if (includeTextDetails) {
formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
pageNumber);
}
FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
String fieldName = "field-" + index;
FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
formFieldMap.put(fieldName, formField);
}));
return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
List<ReadResult> readResults, Integer pageNumber) {
if (CoreUtils.isNullOrEmpty(elements)) {
return IterableStream.of(null);
}
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
if (indices.length < 2) {
throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
+ "for the field value."));
}
int readResultIndex = Integer.parseInt(indices[0]);
int lineIndex = Integer.parseInt(indices[1]);
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord =
readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()));
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
return new IterableStream<>(words.stream()
.map(textWord -> new FormWord(
textWord.getText(),
toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()))
).collect(Collectors.toList()));
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new BoundingBox(pointList);
}
} |
That is not a known case and should not occur. | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with status: %s",
modelInfo.getModelId(), modelInfo.getStatus())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
IterableStream<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
new IterableStream<TrainingDocumentInfo>(trainingDocumentInfoList));
} | List<CustomFormSubModel> subModelList = new ArrayList<>(); | static CustomFormModel toCustomFormModel(Model modelResponse) {
ModelInfo modelInfo = modelResponse.getModelInfo();
if (modelInfo.getStatus() == ModelStatus.INVALID) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(String.format("Model Id %s returned with invalid status.",
modelInfo.getModelId())));
}
List<TrainingDocumentInfo> trainingDocumentInfoList = null;
List<FormRecognizerError> modelErrors = null;
if (modelResponse.getTrainResult() != null) {
trainingDocumentInfoList =
modelResponse.getTrainResult().getTrainingDocuments().stream()
.map(trainingDocumentItem -> new TrainingDocumentInfo(
trainingDocumentItem.getDocumentName(),
TrainingStatus.fromString(trainingDocumentItem.getStatus().toString()),
trainingDocumentItem.getPages(),
transformTrainingErrors(trainingDocumentItem.getErrors())))
.collect(Collectors.toList());
modelErrors = transformTrainingErrors(modelResponse.getTrainResult().getErrors());
}
List<CustomFormSubModel> subModelList = new ArrayList<>();
String formType = "form-";
if (modelResponse.getKeys() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getKeys().getClusters().forEach((clusterKey, clusterFields) -> {
forEachWithIndex(clusterFields, (index, eachField) -> {
String fieldName = "field-" + index;
fieldMap.put(fieldName, new CustomFormModelField(eachField, fieldName, null));
});
subModelList.add(new CustomFormSubModel(
null,
fieldMap,
formType + clusterKey));
});
} else if (modelResponse.getTrainResult() != null && modelResponse.getTrainResult().getFields() != null) {
Map<String, CustomFormModelField> fieldMap = new TreeMap<>();
modelResponse.getTrainResult().getFields()
.forEach(formFieldsReport -> fieldMap.put(formFieldsReport.getFieldName(),
new CustomFormModelField(null, formFieldsReport.getFieldName(),
formFieldsReport.getAccuracy())));
subModelList.add(new CustomFormSubModel(
modelResponse.getTrainResult().getAverageModelAccuracy(),
fieldMap,
formType + modelInfo.getModelId()));
}
return new CustomFormModel(
modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(),
modelInfo.getLastUpdatedDateTime(),
new IterableStream<>(subModelList),
modelErrors,
trainingDocumentInfoList);
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static IterableStream<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return new IterableStream<FormRecognizerError>(Collections.emptyList());
} else {
return new IterableStream<>(trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList()));
}
}
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param list A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> list) {
CollectionTransformer<ModelInfo, CustomFormModelInfo> transformer =
new CollectionTransformer<ModelInfo, CustomFormModelInfo>() {
@Override
CustomFormModelInfo transform(ModelInfo modelInfo) {
return new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime());
}
};
return transformer.transform(list);
}
/**
* A generic transformation class for collection that transform from type {@code E} to type {@code F}.
*
* @param <E> Transform type E to another type.
* @param <F> Transform to type F from another type.
*/
abstract static class CollectionTransformer<E, F> {
abstract F transform(E e);
List<F> transform(List<E> list) {
List<F> newList = new ArrayList<>();
for (E e : list) {
newList.add(transform(e));
}
return newList;
}
}
} | class CustomModelTransforms {
private static final ClientLogger LOGGER = new ClientLogger(CustomModelTransforms.class);
private CustomModelTransforms() {
}
/**
* Helper method to convert the {@link Model model Response} from service to {@link CustomFormModel}.
*
* @param modelResponse The {@code Model model response} returned from the service.
*
* @return The {@link CustomFormModel}.
*/
/**
* Transform a list of {@link ModelInfo} to a list of {@link CustomFormModelInfo}.
*
* @param modelInfoList A list of {@link ModelInfo}.
*
* @return A list of {@link CustomFormModelInfo}.
*/
static List<CustomFormModelInfo> toCustomFormModelInfo(List<ModelInfo> modelInfoList) {
return modelInfoList.stream().map(modelInfo -> new CustomFormModelInfo(modelInfo.getModelId().toString(),
CustomFormModelStatus.fromString(modelInfo.getStatus().toString()),
modelInfo.getCreatedDateTime(), modelInfo.getLastUpdatedDateTime())).collect(Collectors.toList());
}
/**
* Helper method to convert the list of {@link ErrorInformation} to list of {@link FormRecognizerError}.
*
* @param trainingErrorList The list of {@link ErrorInformation}.
*
* @return The list of {@link FormRecognizerError}
*/
private static List<FormRecognizerError> transformTrainingErrors(
List<ErrorInformation> trainingErrorList) {
if (CoreUtils.isNullOrEmpty(trainingErrorList)) {
return Collections.emptyList();
} else {
return trainingErrorList.stream()
.map(errorInformation -> new FormRecognizerError(errorInformation.getCode(),
errorInformation.getMessage()))
.collect(Collectors.toList());
}
}
} |
Since I am using these two variables in lambda expression below they need to be final in nature. | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (DocumentResult documentResultItem : documentResults) {
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
} else {
pageRange.set(new PageRange(1, 1));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
pageRange.get(),
new IterableStream<>(formPages.subList(pageRange.get().getStartPageNumber() - 1,
pageRange.get().getEndPageNumber()))));
}
}
if (!CoreUtils.isNullOrEmpty(pageResults)) {
for (PageResult pageResultItem : pageResults) {
int pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults, pageResultItem,
pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
}
return extractedFormList;
} | AtomicReference<String> formType = new AtomicReference<>("form-"); | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
return new IterableStream<>(
toRecognizedForm(analyzeResult, includeTextDetails).stream()
.map(recognizedForm ->
new RecognizedReceipt("en-US", recognizedForm))
.collect(Collectors.toList()));
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(pageResults)) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
int[] index = new int[]{0};
iterable.forEach(element -> biConsumer.accept(index[0]++, element));
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
List<FormTable> extractedTablesList = new ArrayList<>();
pageResultItem.getTables().forEach(dataTable -> {
IterableStream<FormTableCell> tableCellList = new IterableStream<>(dataTable.getCells().stream()
.map(dataTableCell -> new FormTableCell(
dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(),
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
pageNumber,
setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)
))
.collect(Collectors.toList()));
FormTable extractedTable = new FormTable(dataTable.getRows(), dataTable.getColumns(), tableCellList);
extractedTablesList.add(extractedTable);
});
return extractedTablesList;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
return readResultItem.getLines().stream()
.map(textLine -> new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(), new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage()))))
.collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
IterableStream<FormContent> formContentList = null;
Integer pageNumber = fieldValue.getPage();
if (includeTextDetails) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
FormField<?> value;
switch (fieldValue.getType()) {
case PHONE_NUMBER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
break;
case STRING:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueString(), valueText, pageNumber);
break;
case TIME:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueTime(), valueText, pageNumber);
break;
case DATE:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueDate(), valueText, pageNumber);
break;
case INTEGER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueInteger(), valueText, pageNumber);
break;
case NUMBER:
value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueNumber(), valueText, pageNumber);
break;
case ARRAY:
value = new FormField<>(null, null, key,
toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
break;
case OBJECT:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
pageNumber);
break;
default:
throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
}
return value;
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
Integer pageNumber, List<ReadResult> readResults) {
Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) -> fieldValueObjectMap.put(key,
setFormField(null, key, fieldValue, new FieldText(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
setReferenceElements(fieldValue.getElements(), readResults, pageNumber)),
fieldValue.getPage(), readResults)));
return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
return valueArray.stream()
.map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults))
.collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList) {
return new FormPage(
readResultItem.getHeight(),
readResultItem.getAngle(),
DimensionUnit.fromString(readResultItem.getUnit().toString()),
readResultItem.getWidth(),
new IterableStream<FormLine>(perPageLineList),
new IterableStream<FormTable>(perPageTableList)
);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
List<ReadResult> readResults,
PageResult pageResultItem, Integer pageNumber) {
Map<String, FormField<?>> formFieldMap = new TreeMap<>();
List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
IterableStream<FormContent> formKeyContentList = new IterableStream<>(Collections.emptyList());
IterableStream<FormContent> formValueContentList = new IterableStream<>(Collections.emptyList());
if (includeTextDetails) {
formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
}
String fieldName = "field-" + index;
FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
formFieldMap.put(fieldName, formField);
}));
return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
List<ReadResult> readResults, Integer pageNumber) {
if (CoreUtils.isNullOrEmpty(elements)) {
return new IterableStream<>(Collections.emptyList());
}
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
if (indices.length < 2) {
throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
+ "for the field value."));
}
int readResultIndex = Integer.parseInt(indices[0]);
int lineIndex = Integer.parseInt(indices[1]);
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord =
readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()));
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
return new IterableStream<>(words.stream()
.map(textWord -> new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
setDefaultConfidenceValue(textWord.getConfidence())))
.collect(Collectors.toList()));
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new BoundingBox(new IterableStream<Point>(pointList));
}
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
return new IterableStream<>(
toRecognizedForm(analyzeResult, includeTextDetails).stream()
.map(recognizedForm ->
new RecognizedReceipt("en-US", recognizedForm))
.collect(Collectors.toList()));
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
int[] index = new int[]{0};
iterable.forEach(element -> biConsumer.accept(index[0]++, element));
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
return pageResultItem.getTables().stream()
.map(dataTable ->
new FormTable(dataTable.getRows(), dataTable.getColumns(),
new IterableStream<>(dataTable.getCells().stream()
.map(dataTableCell -> new FormTableCell(
dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(),
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
.collect(Collectors.toList()))))
.collect(Collectors.toList());
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
return readResultItem.getLines().stream()
.map(textLine -> new FormLine(
textLine.getText(),
toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(),
toWords(textLine.getWords(), readResultItem.getPage())))
.collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
List<ReadResult> readResults, boolean includeTextDetails) {
Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
documentResultItem.getFields().forEach((key, fieldValue) -> {
FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
Integer pageNumber = fieldValue.getPage();
IterableStream<FormContent> formContentList = null;
if (includeTextDetails) {
formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
}
FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
pageNumber, formContentList);
extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
});
return extractedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
FormField<?> value;
switch (fieldValue.getType()) {
case PHONE_NUMBER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
break;
case STRING:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueString(), valueText, pageNumber);
break;
case TIME:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueTime(), valueText, pageNumber);
break;
case DATE:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueDate(), valueText, pageNumber);
break;
case INTEGER:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueInteger(), valueText, pageNumber);
break;
case NUMBER:
value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, fieldValue.getValueNumber(), valueText, pageNumber);
break;
case ARRAY:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
break;
case OBJECT:
value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
pageNumber);
break;
default:
throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
}
return value;
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
Integer pageNumber, List<ReadResult> readResults) {
Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) ->
fieldValueObjectMap.put(key, setFormField(null, key, fieldValue,
new FieldText(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()),
fieldValue.getPage(),
setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
), fieldValue.getPage(), readResults)));
return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
return valueArray.stream()
.map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults))
.collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList) {
return new FormPage(
readResultItem.getHeight(),
readResultItem.getAngle(),
DimensionUnit.fromString(readResultItem.getUnit().toString()),
readResultItem.getWidth(),
new IterableStream<FormLine>(perPageLineList),
new IterableStream<FormTable>(perPageTableList)
);
}
/**
 * Populates the key/value pairs recognized on a page into {@link FormField} entries named
 * "field-0", "field-1", ...
 * <p>
 * When {@code includeTextDetails} is {@code false} the key/value reference element streams
 * default to empty streams rather than {@code null}, matching the other transforms in this
 * file, so {@link FieldText} never carries a null element collection.
 *
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the recognized form.
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        // Default to empty streams instead of null (previously null when text details were
        // not requested), consistent with the rest of the transforms.
        IterableStream<FormContent> formKeyContentList = new IterableStream<>(new ArrayList<>());
        IterableStream<FormContent> formValueContentList = new IterableStream<>(new ArrayList<>());
        if (includeTextDetails) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        // Unlabeled-training results have no stable field names; synthesize positional ones.
        String fieldName = "field-" + index;
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
 * Resolves the service returned reference element strings into concrete {@link FormContent}
 * items (words or whole lines) when {@code includeTextDetails} is set to true.
 * <p>
 * Each reference string is reduced to its embedded numeric indices: two indices address a
 * line ({@code readResults[i].lines[j]}), three address a word within that line.
 *
 * @param elements The reference element strings; may be null or empty.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number the referenced elements belong to.
 *
 * @return The list of referenced elements; empty when there are none.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        // Return an empty stream rather than IterableStream.of(null): wrapping a null
        // iterable hands callers a stream they cannot safely iterate. This matches the
        // behavior of the other transforms in this file.
        return new IterableStream<>(new ArrayList<>());
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices identify a single word inside a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            // Two indices identify a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
 * Converts the service level {@link TextWord} items to SDK level {@link FormWord} items.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The list of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>();
    for (TextWord textWord : words) {
        formWords.add(new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
 * Converts the service's flat list of coordinates (eight numbers for the four corner
 * points, taken as consecutive (x, y) pairs) into an SDK level {@link BoundingBox}.
 *
 * @param serviceBoundingBox A list of an even number of coordinate values.
 *
 * @return A {@link BoundingBox}, or null when the input is missing or has an odd length.
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>();
    // Step by two: each iteration consumes one (x, y) pair.
    for (int i = 0; i + 1 < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(pointList);
}
} |
Yes, it would return {3, 3} according to the current implementation of the service. | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (DocumentResult documentResultItem : documentResults) {
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
} else {
pageRange.set(new PageRange(1, 1));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
pageRange.get(),
new IterableStream<>(formPages.subList(pageRange.get().getStartPageNumber() - 1,
pageRange.get().getEndPageNumber()))));
}
}
if (!CoreUtils.isNullOrEmpty(pageResults)) {
for (PageResult pageResultItem : pageResults) {
int pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults, pageResultItem,
pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
}
return extractedFormList;
} | if (documentPageRange.size() == 2) { | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | class Transforms {
// Logger used to surface mapping failures as exceptions.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Matches runs of non-digit characters; used to isolate the numeric indices embedded in
// service reference element strings.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence substituted when the service omits a confidence score.
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Static helper class; not instantiable.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
 * Transforms the service returned {@link AnalyzeResult} into SDK level
 * {@link RecognizedReceipt} items, one per recognized form.
 *
 * @param analyzeResult The service returned result for analyze receipts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The IterableStream of {@code RecognizedReceipt}.
 */
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
 * Transforms the service returned {@link AnalyzeResult} to SDK level {@link FormPage} items,
 * one per read result page.
 *
 * @param analyzeResult The service returned result for analyze layouts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The list of {@code FormPage}.
 */
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // Hoisted out of the per-page lambda: the null/empty state of pageResults does not
    // change while iterating, so evaluate it once.
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            // Page results are parallel to read results, addressed by the same index.
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }));
    return formPages;
}
/**
 * Visits every element of {@code iterable}, handing the element together with its
 * zero based position to {@code biConsumer}.
 *
 * @param iterable the elements to visit.
 * @param biConsumer the callback receiving the index and the element.
 * @param <T> the type of items being returned.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index++, element);
    }
}
/**
 * Builds the per-page table information.
 *
 * @param pageResultItem The extracted page level information returned by the service.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The list of per page {@code FormTable}.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    return pageResultItem.getTables().stream()
        .map(dataTable -> new FormTable(dataTable.getRows(), dataTable.getColumns(),
            new IterableStream<>(dataTable.getCells().stream()
                .map(dataTableCell -> new FormTableCell(
                    dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                    dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                    dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                    dataTableCell.getConfidence(),
                    dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                    dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                    pageNumber,
                    setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
                .collect(Collectors.toList()))))
        .collect(Collectors.toList());
}
/**
 * Converts the per page {@link ReadResult} item to the page's {@link FormLine} items.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    return readResultItem.getLines().stream()
        .map(textLine -> new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            // toWords already yields an IterableStream<FormWord>; wrapping it in another
            // IterableStream (as before) only added a redundant layer of indirection.
            toWords(textLine.getWords(), readResultItem.getPage())))
        .collect(Collectors.toList());
}
/**
 * Builds the field map returned on analyze with an unlabeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The map of field name to {@link FormField}.
 */
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        Integer pageNumber = fieldValue.getPage();
        // Reference elements are only resolved when the caller asked for text details.
        IterableStream<FormContent> formContentList = null;
        if (includeTextDetails) {
            formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        FieldText labelText = new FieldText(key, null, pageNumber, null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
 * Converts the incoming service field value to one of the strongly typed SDK level
 * {@link FormField} instances, with reference elements set when {@code includeTextDetails}
 * is set to true.
 *
 * @param labelText The label text of the field.
 * @param key The name of the field.
 * @param fieldValue The named field values returned by the service.
 * @param valueText The value text of the field.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 * @throws RuntimeException when the service returns an unsupported field value type.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // Apply the default-confidence fallback here too; previously this branch passed a
            // null confidence, unlike every other case in this switch.
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
 * Returns the service supplied confidence, substituting the default value when the
 * service omitted it.
 *
 * @param confidence the confidence returned by service, possibly null.
 *
 * @return the field confidence value.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
 * Converts the service returned map of named {@code FieldValue} items into the SDK level
 * map of {@link FormField} instances.
 *
 * @param valueObject The map of named field values returned by the service.
 * @param pageNumber The 1 based page number the object was found on.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The Map of {@link FormField}.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    valueObject.forEach((key, fieldValue) -> {
        FieldText valueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        fieldValueObjectMap.put(key,
            setFormField(null, key, fieldValue, valueText, fieldValue.getPage(), readResults));
    });
    return fieldValueObjectMap;
}
/**
 * Converts the service returned list of {@code FieldValue} array items into the SDK level
 * list of {@link FormField} instances.
 *
 * @param valueArray The array of field values returned by the service.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The List of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> formFields = new ArrayList<>();
    for (FieldValue arrayItem : valueArray) {
        // Array items have no label or value text of their own; only the typed value is mapped.
        formFields.add(setFormField(null, null, arrayItem, null, arrayItem.getPage(), readResults));
    }
    return formFields;
}
/**
 * Assembles a {@code FormPage} from a per page text extraction result together with the
 * page's tables and lines.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param perPageTableList The per page tables list.
 * @param perPageLineList The per page form lines.
 *
 * @return The per page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    IterableStream<FormLine> pageLines = new IterableStream<>(perPageLineList);
    IterableStream<FormTable> pageTables = new IterableStream<>(perPageTableList);
    return new FormPage(readResultItem.getHeight(), readResultItem.getAngle(),
        DimensionUnit.fromString(readResultItem.getUnit().toString()), readResultItem.getWidth(),
        pageLines, pageTables);
}
/**
 * Populates the key/value pairs recognized on a page into {@link FormField} entries named
 * "field-0", "field-1", ... Reference element streams default to empty streams when text
 * details were not requested.
 *
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the recognized form.
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    forEachWithIndex(pageResultItem.getKeyValuePairs(), ((index, keyValuePair) -> {
        IterableStream<FormContent> keyElements = new IterableStream<>(Collections.emptyList());
        IterableStream<FormContent> valueElements = new IterableStream<>(Collections.emptyList());
        if (includeTextDetails) {
            keyElements = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            valueElements = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
        }
        // Unlabeled-training results have no stable field names; synthesize positional ones.
        String fieldName = "field-" + index;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyElements);
        FieldText valueFieldText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueElements);
        formFieldMap.put(fieldName, new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueFieldText, pageNumber));
    }));
    return formFieldMap;
}
/**
 * Resolves the service returned reference element strings into concrete {@link FormContent}
 * items (words or whole lines) when {@code includeTextDetails} is set to true. Each
 * reference string is reduced to its embedded numeric indices: two indices address a line,
 * three address a word within that line.
 *
 * @param elements The reference element strings; may be null or empty.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number the referenced elements belong to.
 *
 * @return The list of referenced elements; empty when there are none.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new IterableStream<>(Collections.emptyList());
    }
    List<FormContent> formContentList = new ArrayList<>();
    for (String elementString : elements) {
        // Strip everything but the numeric indices embedded in the reference string.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices identify a single word inside a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            formContentList.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber, setDefaultConfidenceValue(textWord.getConfidence())));
        } else {
            // Two indices identify a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            formContentList.add(new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber)));
        }
    }
    return new IterableStream<>(formContentList);
}
/**
 * Converts the service level {@link TextWord} items to SDK level {@link FormWord} items.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The list of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>();
    for (TextWord textWord : words) {
        formWords.add(new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
 * Converts the service's flat list of coordinates (eight numbers for the four corner
 * points, taken as consecutive (x, y) pairs) into an SDK level {@link BoundingBox}.
 *
 * @param serviceBoundingBox A list of an even number of coordinate values.
 *
 * @return A {@link BoundingBox}, or null when the input is missing or has an odd length.
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>();
    // Step by two: each iteration consumes one (x, y) pair.
    for (int i = 0; i + 1 < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(new IterableStream<Point>(pointList));
}
} | class Transforms {
// Logger used to surface mapping failures as exceptions.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Matches runs of non-digit characters; used to isolate the numeric indices embedded in
// service reference element strings.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence substituted when the service omits a confidence score.
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Static helper class; not instantiable.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
 * Transforms the service returned {@link AnalyzeResult} into SDK level
 * {@link RecognizedReceipt} items, one per recognized form.
 *
 * @param analyzeResult The service returned result for analyze receipts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The IterableStream of {@code RecognizedReceipt}.
 */
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
 * Transforms the service returned {@link AnalyzeResult} to SDK level {@link FormPage} items,
 * one per read result page.
 *
 * @param analyzeResult The service returned result for analyze layouts.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The IterableStream of {@code FormPage}.
 */
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // Evaluated once; the null/empty state of pageResults does not change while iterating.
    boolean hasPageResults = !CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        List<FormTable> tablesOnPage = new ArrayList<>();
        if (hasPageResults) {
            // Page results are parallel to read results, addressed by the same index.
            PageResult pageResultItem = pageResults.get(index);
            tablesOnPage = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        List<FormLine> linesOnPage = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            linesOnPage = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, tablesOnPage, linesOnPage));
    }));
    return formPages;
}
/**
 * Visits every element of {@code iterable}, handing the element together with its
 * zero based position to {@code biConsumer}.
 *
 * @param iterable the elements to visit.
 * @param biConsumer the callback receiving the index and the element.
 * @param <T> the type of items being returned.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index++, element);
    }
}
/**
 * Builds the per-page table information.
 *
 * @param pageResultItem The extracted page level information returned by the service.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The list of per page {@code FormTable}.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    return pageResultItem.getTables().stream()
        .map(dataTable -> {
            // Map the raw cells first, then assemble the table around them.
            List<FormTableCell> cells = dataTable.getCells().stream()
                .map(dataTableCell -> new FormTableCell(
                    dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                    dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                    dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                    dataTableCell.getConfidence(),
                    dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                    dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                    pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
                .collect(Collectors.toList());
            return new FormTable(dataTable.getRows(), dataTable.getColumns(), new IterableStream<>(cells));
        })
        .collect(Collectors.toList());
}
/**
 * Converts the per page {@link ReadResult} item to the page's {@link FormLine} items.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            toWords(textLine.getWords(), readResultItem.getPage())));
    }
    return formLines;
}
/**
 * Builds the field map returned on analyze with an unlabeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
 *
 * @return The map of field name to {@link FormField}.
 */
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
    documentResultItem.getFields().forEach((key, fieldValue) -> {
        Integer pageNumber = fieldValue.getPage();
        // Reference elements are only resolved when the caller asked for text details.
        IterableStream<FormContent> formContentList = includeTextDetails
            ? setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
            : null;
        FieldText labelText = new FieldText(key, null, pageNumber, null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    });
    return extractedFieldMap;
}
/**
 * Converts the incoming service field value to one of the strongly typed SDK level
 * {@link FormField} instances, with reference elements set when {@code includeTextDetails}
 * is set to true.
 *
 * @param labelText The label text of the field.
 * @param key The name of the field.
 * @param fieldValue The named field values returned by the service.
 * @param valueText The value text of the field.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 * @throws RuntimeException when the service returns an unsupported field value type.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    // Every branch applies the same default-confidence fallback; compute it once up front.
    float confidence = setDefaultConfidenceValue(fieldValue.getConfidence());
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            return new FormField<>(confidence, labelText, key, fieldValue.getValuePhoneNumber(),
                valueText, pageNumber);
        case STRING:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueString(),
                valueText, pageNumber);
        case TIME:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueTime(),
                valueText, pageNumber);
        case DATE:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueDate(),
                valueText, pageNumber);
        case INTEGER:
            return new FormField<>(confidence, labelText, key, fieldValue.getValueInteger(),
                valueText, pageNumber);
        case NUMBER:
            return new FormField<Number>(confidence, labelText, key, fieldValue.getValueNumber(),
                valueText, pageNumber);
        case ARRAY:
            // Array values have no label/value text of their own.
            return new FormField<>(confidence, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
        case OBJECT:
            return new FormField<>(confidence, labelText, key,
                toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
}
/**
 * Returns the service supplied confidence, substituting the default value when the
 * service omitted it.
 *
 * @param confidence the confidence returned by service, possibly null.
 *
 * @return the field confidence value.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
 * Converts the service-returned map of {@link FieldValue} entries into an SDK-level
 * sorted map of strongly typed {@link FormField} values.
 *
 * @param valueObject The named field values returned by the service.
 * @param pageNumber The 1-based page number used when resolving reference elements.
 * @param readResults The text extraction results returned by the service.
 *
 * @return The sorted map of {@link FormField}.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> convertedFields = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        String fieldName = entry.getKey();
        FieldValue fieldValue = entry.getValue();
        FieldText valueText = new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()),
            fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber));
        convertedFields.put(fieldName,
            setFormField(null, fieldName, fieldValue, valueText, fieldValue.getPage(), readResults));
    }
    return convertedFields;
}
/**
 * Converts the service-returned list of {@link FieldValue} array elements into an
 * SDK-level list of strongly typed {@link FormField} values.
 *
 * @param valueArray The array of field values returned by the service.
 * @param readResults The text extraction results returned by the service.
 *
 * @return The list of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> formFields = new ArrayList<>();
    for (FieldValue fieldValue : valueArray) {
        // Array elements have no label or value text of their own.
        formFields.add(setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults));
    }
    return formFields;
}
/**
 * Builds a {@code FormPage} from one per-page text extraction result plus the tables and
 * lines already converted for that page.
 *
 * @param readResultItem The per-page text extraction item returned by the service.
 * @param perPageTableList The tables recognized on this page.
 * @param perPageLineList The form lines recognized on this page.
 *
 * @return The per-page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    DimensionUnit pageUnit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    IterableStream<FormLine> pageLines = new IterableStream<FormLine>(perPageLineList);
    IterableStream<FormTable> pageTables = new IterableStream<FormTable>(perPageTableList);
    return new FormPage(readResultItem.getHeight(), readResultItem.getAngle(), pageUnit,
        readResultItem.getWidth(), pageLines, pageTables);
}
/**
 * Builds the {@link RecognizedForm} field map from the key/value pairs of one page result,
 * naming each pair {@code field-<index>}.
 *
 * @param includeTextDetails Whether to resolve reference elements onto the field texts
 *     (when false, the reference-element streams are left null).
 * @param readResults The text extraction results returned by the service.
 * @param pageResultItem The extracted page-level information returned by the service.
 * @param pageNumber The 1-based page number these fields appear on.
 *
 * @return The sorted map of synthesized fields for this page.
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> fields = new TreeMap<>();
    forEachWithIndex(pageResultItem.getKeyValuePairs(), (index, keyValuePair) -> {
        IterableStream<FormContent> keyElements = null;
        IterableStream<FormContent> valueElements = null;
        if (includeTextDetails) {
            keyElements = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            valueElements = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
                pageNumber);
        }
        FieldText labelText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, keyElements);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, valueElements);
        // Unlabeled pairs get synthetic, index-based names.
        String fieldName = "field-" + index;
        fields.put(fieldName, new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber));
    });
    return fields;
}
/**
 * Resolves the text reference elements for a field value when {@code includeTextDetails}
 * is set to true, mapping each element string back into the read results.
 *
 * @param elements The element reference strings returned by the service; assumed to encode
 *     a read-result index, a line index and optionally a word index (e.g. a JSON-pointer-like
 *     path — TODO confirm exact format against the service spec).
 * @param readResults The text extraction results returned by the service.
 * @param pageNumber The 1-based page number assigned to the resolved elements.
 *
 * @return The stream of referenced elements.
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        // NOTE(review): IterableStream.of(null) with no elements — the sibling
        // implementation returns an empty stream instead; confirm which callers expect.
        return IterableStream.of(null);
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Strip every non-digit run, leaving the numeric indices separated by spaces.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices: the reference points at a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            // Two indices (or more than three): treat the reference as a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
 * Converts the service-level {@link TextWord} list into a stream of SDK-level
 * {@link FormWord} elements.
 *
 * @param words The word reference elements returned by the service.
 * @param pageNumber The 1-based page number on which these words appear.
 *
 * @return The stream of {@code FormWord words}.
 */
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord textWord : words) {
        formWords.add(new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(textWord.getConfidence())));
    }
    return new IterableStream<>(formWords);
}
/**
 * Converts the service's flat list of eight numbers (four x/y points) into an SDK-level
 * {@link BoundingBox}.
 *
 * @param serviceBoundingBox A list of numbers representing the points of a box, in
 *     alternating x, y order.
 *
 * @return A {@link BoundingBox}, or null when the input is null, empty, or has an odd
 *     number of coordinates.
 */
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    // Consume coordinates pairwise (x, y). Stepping by 2 replaces the original
    // error-prone in-argument "++i" mutation of the loop variable.
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new BoundingBox(pointList);
}
} |
Either leave `US` as-is or expand it. Don't really like changing this to `Us`. Check with @JonathanGiles too. | public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
} | FormField<String> merchantName = null; | public static USReceipt asUSReceipt(RecognizedReceipt receipt) {
USReceiptType receiptType = null;
FormField<String> merchantName = null;
FormField<String> merchantAddress = null;
FormField<String> merchantPhoneNumber = null;
FormField<Float> subtotal = null;
FormField<Float> tax = null;
FormField<Float> tip = null;
FormField<Float> total = null;
FormField<LocalDate> transactionDate = null;
FormField<String> transactionTime = null;
List<USReceiptItem> receiptItems = null;
for (Map.Entry<String, FormField<?>> entry : receipt.getRecognizedForm().getFields().entrySet()) {
String key = entry.getKey();
FormField<?> fieldValue = entry.getValue();
switch (key) {
case "ReceiptType":
receiptType = new USReceiptType(key, fieldValue.getConfidence());
break;
case "MerchantName":
merchantName = (FormField<String>) fieldValue;
break;
case "MerchantAddress":
merchantAddress = (FormField<String>) fieldValue;
break;
case "MerchantPhoneNumber":
merchantPhoneNumber = (FormField<String>) fieldValue;
break;
case "Subtotal":
subtotal = (FormField<Float>) fieldValue;
break;
case "Tax":
tax = (FormField<Float>) fieldValue;
break;
case "Tip":
tip = (FormField<Float>) fieldValue;
break;
case "Total":
total = (FormField<Float>) fieldValue;
break;
case "TransactionDate":
transactionDate = (FormField<LocalDate>) fieldValue;
break;
case "TransactionTime":
transactionTime = (FormField<String>) fieldValue;
break;
case "Items":
receiptItems = toReceiptItems(fieldValue);
break;
default:
break;
}
}
return new USReceipt(receipt.getReceiptLocale(), receipt.getRecognizedForm(), receiptItems, receiptType,
merchantName, merchantAddress, merchantPhoneNumber, subtotal, tax, tip, total, transactionDate,
transactionTime);
} | class ReceiptExtensions {
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
/**
 * Converts the service-returned "Items" {@link FormField field value} into SDK-level
 * {@link USReceiptItem receipt items}.
 *
 * @param fieldValueItems The strongly typed field values of the "Items" field.
 *
 * @return A list of {@link USReceiptItem}, one per array element.
 */
@SuppressWarnings("unchecked")
private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
    List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
    List<USReceiptItem> receiptItemList = new ArrayList<>();
    for (FormField<?> eachFieldValue : fieldValueArray) {
        Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
        // Declared per item so a key that is absent for this item stays null instead of
        // carrying over the value parsed for a previous item (bug in the original, which
        // declared these outside the loop).
        FormField<String> name = null;
        FormField<Float> quantity = null;
        FormField<Float> price = null;
        FormField<Float> totalPrice = null;
        for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
            String key = entry.getKey();
            if (QUANTITY.toString().equals(key)) {
                quantity = (FormField<Float>) entry.getValue();
            } else if (NAME.toString().equals(key)) {
                name = (FormField<String>) entry.getValue();
            } else if (PRICE.toString().equals(key)) {
                price = (FormField<Float>) entry.getValue();
            } else if (TOTAL_PRICE.toString().equals(key)) {
                totalPrice = (FormField<Float>) entry.getValue();
            }
        }
        // Construct once per item, after all keys have been examined (the original
        // rebuilt the item on every map entry).
        receiptItemList.add(new USReceiptItem(name, quantity, price, totalPrice));
    }
    return receiptItemList;
}
} | class ReceiptExtensions {
// Private constructor: ReceiptExtensions is a static utility class and must not be instantiated.
private ReceiptExtensions() {
}
/**
* Static method to convert an incoming receipt to a {@link USReceipt type}.
*
* @param receipt The {@link RecognizedReceipt recognized receipt}.
*
* @return The converted {@link USReceipt US locale receipt} type.
*/
@SuppressWarnings("unchecked")
/**
 * Converts the service-returned "Items" {@link FormField field value} into SDK-level
 * {@link USReceiptItem receipt items}.
 *
 * @param fieldValueItems The strongly typed field values of the "Items" field.
 *
 * @return A list of {@link USReceiptItem}, or null when the field contains no items
 *     (preserves the original null-on-empty contract for callers).
 */
@SuppressWarnings("unchecked")
private static List<USReceiptItem> toReceiptItems(FormField<?> fieldValueItems) {
    List<FormField<?>> fieldValueArray = (List<FormField<?>>) fieldValueItems.getFieldValue();
    List<USReceiptItem> receiptItemList = null;
    for (FormField<?> eachFieldValue : fieldValueArray) {
        if (receiptItemList == null) {
            // Created lazily so an empty input still yields null, but no longer re-created
            // on every iteration: the original reset the list once per item, discarding all
            // previously collected items and returning only the last one.
            receiptItemList = new ArrayList<>();
        }
        Map<String, FormField<?>> objectValue = ((Map<String, FormField<?>>) (eachFieldValue.getFieldValue()));
        FormField<String> name = null;
        FormField<Float> quantity = null;
        FormField<Float> price = null;
        FormField<Float> totalPrice = null;
        for (Map.Entry<String, FormField<?>> entry : objectValue.entrySet()) {
            String key = entry.getKey();
            if (QUANTITY.toString().equals(key)) {
                quantity = (FormField<Float>) entry.getValue();
            } else if (NAME.toString().equals(key)) {
                name = (FormField<String>) entry.getValue();
            } else if (PRICE.toString().equals(key)) {
                price = (FormField<Float>) entry.getValue();
            } else if (TOTAL_PRICE.toString().equals(key)) {
                totalPrice = (FormField<Float>) entry.getValue();
            }
        }
        receiptItemList.add(new USReceiptItem(name, quantity, price, totalPrice));
    }
    return receiptItemList;
}
} |
edit: updated this. | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = new ArrayList<>();
Map<String, FormField<?>> extractedFieldMap;
AtomicReference<PageRange> pageRange = new AtomicReference<>();
AtomicReference<String> formType = new AtomicReference<>("form-");
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
for (DocumentResult documentResultItem : documentResults) {
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange.set(new PageRange(documentPageRange.get(0), documentPageRange.get(1)));
} else {
pageRange.set(new PageRange(1, 1));
}
formType.set(documentResultItem.getDocType());
extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults, includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
pageRange.get(),
new IterableStream<>(formPages.subList(pageRange.get().getStartPageNumber() - 1,
pageRange.get().getEndPageNumber()))));
}
}
if (!CoreUtils.isNullOrEmpty(pageResults)) {
for (PageResult pageResultItem : pageResults) {
int pageNumber = pageResultItem.getPage();
if (CoreUtils.isNullOrEmpty(documentResults)) {
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.set(formType.get() + clusterId);
}
extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults, pageResultItem,
pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.get(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
}
return extractedFormList;
} | AtomicReference<String> formType = new AtomicReference<>("form-"); | static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList = null;
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeTextDetails);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
PageRange pageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
if (documentPageRange.size() == 2) {
pageRange = new PageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
pageRange = new PageRange(1, 1);
}
Map<String, FormField<?>> extractedFieldMap = getUnlabeledFieldMap(documentResultItem, readResults,
includeTextDetails);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
pageRange,
new IterableStream<>(formPages.subList(pageRange.getStartPageNumber() - 1,
pageRange.getEndPageNumber()))));
}
} else if (!CoreUtils.isNullOrEmpty(pageResults)) {
extractedFormList = new ArrayList<>();
for (PageResult pageResultItem : pageResults) {
StringBuffer formType = new StringBuffer("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField<?>> extractedFieldMap = getLabeledFieldMap(includeTextDetails, readResults,
pageResultItem, pageNumber);
extractedFormList.add(new RecognizedForm(
extractedFieldMap,
formType.toString(),
new PageRange(pageNumber, pageNumber),
new IterableStream<>(Collections.singletonList(formPages.get(pageNumber - 1)))));
}
}
return extractedFormList;
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Private constructor: Transforms is a static utility class and must not be instantiated.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
 * Transforms the service-returned {@link AnalyzeResult} into a stream of SDK-level
 * {@link RecognizedReceipt} items.
 *
 * @param analyzeResult The service-returned result for analyze receipts.
 * @param includeTextDetails Whether to set reference-element data on fields.
 *
 * @return The stream of {@code RecognizedReceipt}.
 */
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<RecognizedReceipt> receipts = new ArrayList<>();
    for (RecognizedForm recognizedForm : toRecognizedForm(analyzeResult, includeTextDetails)) {
        // Receipts are currently always produced with the en-US locale.
        receipts.add(new RecognizedReceipt("en-US", recognizedForm));
    }
    return new IterableStream<>(receipts);
}
/**
 * Transforms the service-returned {@link AnalyzeResult} into SDK-level {@link FormPage}
 * items, one per read result.
 *
 * @param analyzeResult The service-returned result for analyze layouts.
 * @param includeTextDetails Whether to populate per-page form lines.
 *
 * @return The list of {@code FormPage}, one per read result page.
 */
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    // NOTE(review): pageResults is indexed with the same position as readResults —
    // assumes the two lists are aligned 1:1 by page; confirm against the service contract.
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!CoreUtils.isNullOrEmpty(pageResults)) {
            PageResult pageResultItem = pageResults.get(index);
            perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
        }
        // Lines are only materialized when the caller asked for text details.
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
    }));
    return formPages;
}
/**
 * Applies {@code biConsumer} to each element of {@code iterable} together with the
 * element's zero-based position.
 *
 * @param iterable the elements to iterate over.
 * @param biConsumer the function receiving the index and the element.
 * @param <T> the element type.
 */
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
    // A plain loop with a local counter replaces the original single-element int[]
    // holder, which was only needed to mutate state from inside a forEach lambda.
    int index = 0;
    for (T element : iterable) {
        biConsumer.accept(index++, element);
    }
}
/**
 * Extracts the per-page table information from one page result.
 *
 * @param pageResultItem The extracted page-level information returned by the service.
 * @param readResults The text extraction results returned by the service.
 * @param pageNumber The 1-based page number these tables appear on.
 *
 * @return The list of per-page {@code FormTable}.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
    List<FormTable> tables = new ArrayList<>();
    pageResultItem.getTables().forEach(serviceTable -> {
        List<FormTableCell> cells = serviceTable.getCells().stream()
            .map(serviceCell -> new FormTableCell(
                serviceCell.getRowIndex(), serviceCell.getColumnIndex(),
                serviceCell.getRowSpan(), serviceCell.getColumnSpan(),
                serviceCell.getText(), toBoundingBox(serviceCell.getBoundingBox()),
                serviceCell.getConfidence(),
                // Boolean.TRUE.equals(...) maps the service's nullable flags to false,
                // matching the original explicit null check.
                Boolean.TRUE.equals(serviceCell.isHeader()),
                Boolean.TRUE.equals(serviceCell.isFooter()),
                pageNumber,
                setReferenceElements(serviceCell.getElements(), readResults, pageNumber)))
            .collect(Collectors.toList());
        tables.add(new FormTable(serviceTable.getRows(), serviceTable.getColumns(),
            new IterableStream<>(cells)));
    });
    return tables;
}
/**
 * Converts one per-page {@link ReadResult} item into its SDK-level {@link FormLine} list.
 *
 * @param readResultItem The per-page text extraction item returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine textLine : readResultItem.getLines()) {
        formLines.add(new FormLine(
            textLine.getText(),
            toBoundingBox(textLine.getBoundingBox()),
            readResultItem.getPage(),
            new IterableStream<>(toWords(textLine.getWords(), readResultItem.getPage()))));
    }
    return formLines;
}
/**
 * Builds the field map returned on analyze with an unlabeled model id, converting the
 * document-level fields returned by the service.
 *
 * @param documentResultItem The extracted document-level information.
 * @param readResults The text extraction results returned by the service.
 * @param includeTextDetails Whether to resolve reference-element data on fields
 *     (when false, the reference-element stream stays null).
 *
 * @return The sorted map of strongly typed {@link FormField} values.
 */
private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults, boolean includeTextDetails) {
    Map<String, FormField<?>> fields = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : documentResultItem.getFields().entrySet()) {
        String key = entry.getKey();
        FieldValue fieldValue = entry.getValue();
        Integer pageNumber = fieldValue.getPage();
        IterableStream<FormContent> formContentList = null;
        if (includeTextDetails) {
            formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
        }
        // The field's name doubles as its label; the label carries no bounding box.
        FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
        FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
            pageNumber, formContentList);
        fields.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
    }
    return fields;
}
/**
 * Helper method that converts the incoming service field value to one of the strongly typed SDK level
 * {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
 *
 * @param labelText The label text of the field (may be null for array elements).
 * @param key The name of the field.
 * @param fieldValue The named field values returned by the service.
 * @param valueText The value text of the field.
 * @param pageNumber The 1-based page number.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The strongly typed {@link FormField} for the field input.
 * @throws RuntimeException if the service reports a field value type this SDK does not support.
 */
private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
    FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
    FormField<?> value;
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
            break;
        case STRING:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueString(), valueText, pageNumber);
            break;
        case TIME:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueTime(), valueText, pageNumber);
            break;
        case DATE:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueDate(), valueText, pageNumber);
            break;
        case INTEGER:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueInteger(), valueText, pageNumber);
            break;
        case NUMBER:
            value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, fieldValue.getValueNumber(), valueText, pageNumber);
            break;
        case ARRAY:
            // NOTE(review): this branch passes a null confidence, unlike every other branch
            // (and unlike the sibling implementation elsewhere in this file, which defaults
            // it) — confirm this is intended and that the FormField constructor accepts null.
            value = new FormField<>(null, null, key,
                toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
            break;
        case OBJECT:
            value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                pageNumber);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return value;
}
/**
 * Substitutes the SDK default confidence when the service returned none.
 *
 * @param confidence the confidence returned by the service, possibly null.
 *
 * @return the non-null field confidence value.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    return confidence != null ? confidence : DEFAULT_CONFIDENCE_VALUE;
}
/**
 * Converts the service-returned map of {@link FieldValue} entries into an SDK-level
 * sorted map of strongly typed {@link FormField} values.
 *
 * @param valueObject The named field values returned by the service.
 * @param pageNumber The 1-based page number used when resolving reference elements.
 * @param readResults The text extraction results returned by the service.
 *
 * @return The sorted Map of {@link FormField}.
 */
private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
    Integer pageNumber, List<ReadResult> readResults) {
    Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
    // NOTE(review): reference elements are resolved against the parent's pageNumber while
    // the FieldText itself uses fieldValue.getPage() — confirm these always coincide.
    valueObject.forEach((key, fieldValue) -> fieldValueObjectMap.put(key,
        setFormField(null, key, fieldValue, new FieldText(fieldValue.getText(),
            toBoundingBox(fieldValue.getBoundingBox()), fieldValue.getPage(),
            setReferenceElements(fieldValue.getElements(), readResults, pageNumber)),
            fieldValue.getPage(), readResults)));
    return fieldValueObjectMap;
}
/**
 * Converts the service-returned list of {@link FieldValue} array elements into an
 * SDK-level list of strongly typed {@link FormField} values.
 *
 * @param valueArray The array of field values returned by the service.
 * @param readResults The text extraction results returned by the service.
 *
 * @return The List of {@link FormField}.
 */
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField<?>> convertedElements = new ArrayList<>();
    for (FieldValue element : valueArray) {
        // Array elements carry neither a label nor their own value text.
        convertedElements.add(setFormField(null, null, element, null, element.getPage(), readResults));
    }
    return convertedElements;
}
/**
 * Builds the per-page {@code FormPage} from a read result and the already converted
 * tables and lines of that page.
 *
 * @param readResultItem The per-page text extraction item returned by the service.
 * @param perPageTableList The per-page tables list.
 * @param perPageLineList The per-page form lines.
 *
 * @return The per-page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList) {
    // The service reports its unit as an enum-like value; convert via its string form.
    DimensionUnit unit = DimensionUnit.fromString(readResultItem.getUnit().toString());
    return new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        unit,
        readResultItem.getWidth(),
        new IterableStream<FormLine>(perPageLineList),
        new IterableStream<FormTable>(perPageTableList));
}
/**
 * Builds the {@link RecognizedForm} field map from the key/value pairs of one page result,
 * naming each pair {@code field-<index>}.
 *
 * @param includeTextDetails Whether to resolve reference-element data on the field texts.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page-level information returned by the service.
 * @param pageNumber The 1-based page number on which these fields exist.
 *
 * @return The sorted map of synthesized fields for this page.
 */
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
    List<ReadResult> readResults,
    PageResult pageResultItem, Integer pageNumber) {
    Map<String, FormField<?>> formFieldMap = new TreeMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        // Reference elements default to empty streams and are only resolved on request.
        IterableStream<FormContent> formKeyContentList = new IterableStream<>(Collections.emptyList());
        IterableStream<FormContent> formValueContentList = new IterableStream<>(Collections.emptyList());
        if (includeTextDetails) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults, pageNumber);
        }
        // Unlabeled pairs get synthetic, index-based names.
        String fieldName = "field-" + index;
        FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
            labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
 * Resolves the text reference elements for a field value when {@code includeTextDetails}
 * is set to true, mapping each element string back into the read results.
 *
 * @param elements The element reference strings returned by the service; assumed to encode
 *     a read-result index, a line index and optionally a word index — TODO confirm exact
 *     format against the service spec.
 * @param readResults The text extraction results returned by the service.
 * @param pageNumber The 1-based page number assigned to the resolved elements.
 *
 * @return The stream of referenced elements (empty when no elements were supplied).
 */
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
    List<ReadResult> readResults, Integer pageNumber) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new IterableStream<>(Collections.emptyList());
    }
    List<FormContent> formContentList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Strip every non-digit run, leaving the numeric indices separated by spaces.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices: the reference points at a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                pageNumber,
                setDefaultConfidenceValue(textWord.getConfidence()));
            formContentList.add(wordElement);
        } else {
            // Two indices (or more than three): treat the reference as a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                pageNumber, toWords(textLine.getWords(), pageNumber));
            formContentList.add(lineElement);
        }
    });
    return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
return new IterableStream<>(words.stream()
.map(textWord -> new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()), pageNumber,
setDefaultConfidenceValue(textWord.getConfidence())))
.collect(Collectors.toList()));
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new BoundingBox(new IterableStream<Point>(pointList));
}
} | class Transforms {
    private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
    // Matches runs of non-digit characters; used to strip element reference strings down to
    // their numeric indices before parsing.
    private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
    // Confidence reported when the service omits a confidence score.
    private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
    // Static utility class -- no instances.
    private Transforms() {
    }
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedForm}.
*/
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedReceipt}.
*
* @param analyzeResult The service returned result for analyze receipts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code RecognizedReceipt}.
*/
static IterableStream<RecognizedReceipt> toReceipt(AnalyzeResult analyzeResult, boolean includeTextDetails) {
return new IterableStream<>(
toRecognizedForm(analyzeResult, includeTextDetails).stream()
.map(recognizedForm ->
new RecognizedReceipt("en-US", recognizedForm))
.collect(Collectors.toList()));
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The IterableStream of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeTextDetails) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeTextDetails && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList));
}));
return formPages;
}
/**
* Given an iterable will apply the indexing function to it and return the index and each item of the iterable.
*
* @param iterable the list to apply the mapping function to.
* @param biConsumer the function which accepts the index and the each value of the iterable.
* @param <T> the type of items being returned.
*/
static <T> void forEachWithIndex(Iterable<T> iterable, BiConsumer<Integer, T> biConsumer) {
int[] index = new int[]{0};
iterable.forEach(element -> biConsumer.accept(index[0]++, element));
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
    static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, Integer pageNumber) {
        // Map every service DataTable/DataTableCell onto the SDK FormTable/FormTableCell.
        return pageResultItem.getTables().stream()
            .map(dataTable ->
                new FormTable(dataTable.getRows(), dataTable.getColumns(),
                    new IterableStream<>(dataTable.getCells().stream()
                        .map(dataTableCell -> new FormTableCell(
                            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                            dataTableCell.getRowSpan(), dataTableCell.getColumnSpan(),
                            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                            dataTableCell.getConfidence(),
                            // Header/footer flags may be null from the service; treat null as false.
                            dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                            dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                            // Table cells resolve their text reference elements unconditionally here,
                            // unlike field maps which gate on includeTextDetails.
                            pageNumber, setReferenceElements(dataTableCell.getElements(), readResults, pageNumber)))
                        .collect(Collectors.toList()))))
            .collect(Collectors.toList());
    }
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
return readResultItem.getLines().stream()
.map(textLine -> new FormLine(
textLine.getText(),
toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(),
toWords(textLine.getWords(), readResultItem.getPage())))
.collect(Collectors.toList());
}
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
*
* @return The {@code RecognizedForm
*/
    private static Map<String, FormField<?>> getUnlabeledFieldMap(DocumentResult documentResultItem,
        List<ReadResult> readResults, boolean includeTextDetails) {
        // TreeMap keeps the extracted fields sorted by their service-provided names.
        Map<String, FormField<?>> extractedFieldMap = new TreeMap<>();
        documentResultItem.getFields().forEach((key, fieldValue) -> {
            // The field name itself carries no bounding box or reference elements.
            FieldText labelText = new FieldText(key, null, fieldValue.getPage(), null);
            Integer pageNumber = fieldValue.getPage();
            // Reference elements are resolved only when the caller requested text details;
            // otherwise the value text carries a null content list.
            IterableStream<FormContent> formContentList = null;
            if (includeTextDetails) {
                formContentList = setReferenceElements(fieldValue.getElements(), readResults, pageNumber);
            }
            FieldText valueText = new FieldText(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
                pageNumber, formContentList);
            extractedFieldMap.put(key, setFormField(labelText, key, fieldValue, valueText, pageNumber, readResults));
        });
        return extractedFieldMap;
    }
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeTextDetails} is set to true.
*
* @param labelText The label text of the field.
* @param key The name of the field.
* @param fieldValue The named field values returned by the service.
* @param valueText The value text of the field.
* @param pageNumber The 1-based page number.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
    private static FormField<?> setFormField(FieldText labelText, String key, FieldValue fieldValue,
        FieldText valueText, Integer pageNumber, List<ReadResult> readResults) {
        FormField<?> value;
        // Dispatch on the service-declared value type to build the correctly parameterized FormField.
        switch (fieldValue.getType()) {
            case PHONE_NUMBER:
                value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                    key, fieldValue.getValuePhoneNumber(), valueText, pageNumber);
                break;
            case STRING:
                value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                    key, fieldValue.getValueString(), valueText, pageNumber);
                break;
            case TIME:
                value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                    key, fieldValue.getValueTime(), valueText, pageNumber);
                break;
            case DATE:
                value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                    key, fieldValue.getValueDate(), valueText, pageNumber);
                break;
            case INTEGER:
                value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                    key, fieldValue.getValueInteger(), valueText, pageNumber);
                break;
            case NUMBER:
                value = new FormField<Number>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                    key, fieldValue.getValueNumber(), valueText, pageNumber);
                break;
            case ARRAY:
                // Arrays carry no label or value text of their own; elements convert recursively.
                value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), null, key,
                    toFormFieldArray(fieldValue.getValueArray(), readResults), null, pageNumber);
                break;
            case OBJECT:
                // Objects become a map of nested FormFields, converted recursively.
                value = new FormField<>(setDefaultConfidenceValue(fieldValue.getConfidence()), labelText,
                    key, toFormFieldObject(fieldValue.getValueObject(), pageNumber, readResults), valueText,
                    pageNumber);
                break;
            default:
                throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
        }
        return value;
    }
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in
* {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
    private static Map<String, FormField<?>> toFormFieldObject(Map<String, FieldValue> valueObject,
        Integer pageNumber, List<ReadResult> readResults) {
        // TreeMap keeps the nested fields sorted by name.
        Map<String, FormField<?>> fieldValueObjectMap = new TreeMap<>();
        // Nested fields have no separate label text; their reference elements are always resolved here.
        valueObject.forEach((key, fieldValue) ->
            fieldValueObjectMap.put(key, setFormField(null, key, fieldValue,
                new FieldText(fieldValue.getText(),
                    toBoundingBox(fieldValue.getBoundingBox()),
                    fieldValue.getPage(),
                    setReferenceElements(fieldValue.getElements(), readResults, pageNumber)
                ), fieldValue.getPage(), readResults)));
        return fieldValueObjectMap;
    }
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in
* {@link FieldValue
* @param readResults The text extraction result returned by the service.
*
* @return The List of {@link FormField}.
*/
private static List<FormField<?>> toFormFieldArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
return valueArray.stream()
.map(fieldValue -> setFormField(null, null, fieldValue, null, fieldValue.getPage(), readResults))
.collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList) {
return new FormPage(
readResultItem.getHeight(),
readResultItem.getAngle(),
DimensionUnit.fromString(readResultItem.getUnit().toString()),
readResultItem.getWidth(),
new IterableStream<FormLine>(perPageLineList),
new IterableStream<FormTable>(perPageTableList)
);
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeTextDetails Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField<?>> getLabeledFieldMap(boolean includeTextDetails,
List<ReadResult> readResults,
PageResult pageResultItem, Integer pageNumber) {
Map<String, FormField<?>> formFieldMap = new TreeMap<>();
List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
IterableStream<FormContent> formKeyContentList = null;
IterableStream<FormContent> formValueContentList = null;
if (includeTextDetails) {
formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults, pageNumber);
formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults,
pageNumber);
}
FieldText labelFieldText = new FieldText(keyValuePair.getKey().getText(),
toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
FieldText valueText = new FieldText(keyValuePair.getValue().getText(),
toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
String fieldName = "field-" + index;
FormField<String> formField = new FormField<>(setDefaultConfidenceValue(keyValuePair.getConfidence()),
labelFieldText, fieldName, keyValuePair.getValue().getText(), valueText, pageNumber);
formFieldMap.put(fieldName, formField);
}));
return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeTextDetails} set to
* true.
*
* @return The list if referenced elements.
*/
private static IterableStream<FormContent> setReferenceElements(List<String> elements,
List<ReadResult> readResults, Integer pageNumber) {
if (CoreUtils.isNullOrEmpty(elements)) {
return IterableStream.of(null);
}
List<FormContent> formContentList = new ArrayList<>();
elements.forEach(elementString -> {
String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
if (indices.length < 2) {
throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
+ "for the field value."));
}
int readResultIndex = Integer.parseInt(indices[0]);
int lineIndex = Integer.parseInt(indices[1]);
if (indices.length == 3) {
int wordIndex = Integer.parseInt(indices[2]);
TextWord textWord =
readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()));
formContentList.add(wordElement);
} else {
TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
pageNumber, toWords(textLine.getWords(), pageNumber));
formContentList.add(lineElement);
}
});
return new IterableStream<>(formContentList);
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static IterableStream<FormWord> toWords(List<TextWord> words, Integer pageNumber) {
return new IterableStream<>(words.stream()
.map(textWord -> new FormWord(
textWord.getText(),
toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()))
).collect(Collectors.toList()));
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link BoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link BoundingBox}.
*/
private static BoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new BoundingBox(pointList);
}
} |
From what I remember when we are associating requests to responses when using the playback records we explicitly ignore the entire host string. Should be able to simplify the logic to redact the entire host string. | private void redactedAccountName(UrlBuilder urlBuilder) {
String[] hostParts = urlBuilder.getHost().split("\\.");
hostParts[0] = "REDACTED";
urlBuilder.setHost(String.join(".", hostParts));
} | hostParts[0] = "REDACTED"; | private void redactedAccountName(UrlBuilder urlBuilder) {
String[] hostParts = urlBuilder.getHost().split("\\.");
hostParts[0] = "REDACTED";
urlBuilder.setHost(String.join(".", hostParts));
} | class RecordNetworkCallPolicy implements HttpPipelinePolicy {
    // Buffer size used when inflating gzip-encoded response bodies.
    private static final int DEFAULT_BUFFER_LENGTH = 1024;
    // Header names captured from requests and responses.
    private static final String CONTENT_TYPE = "Content-Type";
    private static final String CONTENT_ENCODING = "Content-Encoding";
    private static final String CONTENT_LENGTH = "Content-Length";
    private static final String X_MS_CLIENT_REQUEST_ID = "x-ms-client-request-id";
    private static final String X_MS_ENCRYPTION_KEY_SHA256 = "x-ms-encryption-key-sha256";
    private static final String X_MS_VERSION = "x-ms-version";
    private static final String USER_AGENT = "User-Agent";
    // Keys used in the persisted response-data map.
    private static final String STATUS_CODE = "StatusCode";
    private static final String BODY = "Body";
    // SAS signature query parameter; its value is redacted before the URI is recorded.
    private static final String SIG = "sig";
    // Patterns extracting the secret portions of a UserDelegationKey XML payload for redaction.
    private static final Pattern DELEGATIONKEY_KEY_PATTERN = Pattern.compile("(?:<Value>)(.*)(?:</Value>)");
    private static final Pattern DELEGATIONKEY_CLIENTID_PATTERN = Pattern.compile("(?:<SignedOid>)(.*)(?:</SignedOid>)");
    private static final Pattern DELEGATIONKEY_TENANTID_PATTERN = Pattern.compile("(?:<SignedTid>)(.*)(?:</SignedTid>)");
    private final ClientLogger logger = new ClientLogger(RecordNetworkCallPolicy.class);
    // Shared sink that accumulates the recorded network calls.
    private final RecordedData recordedData;
    /**
     * Creates a policy that records network calls into {@code recordedData}.
     *
     * @param recordedData The record to persist network calls into.
     */
    public RecordNetworkCallPolicy(RecordedData recordedData) {
        Objects.requireNonNull(recordedData, "'recordedData' cannot be null.");
        this.recordedData = recordedData;
    }
    @Override
    public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
        // Capture the outgoing request (selected headers, method, sanitized URI), forward it,
        // then record the response -- or the error -- so the call can be replayed later.
        final NetworkCallRecord networkCallRecord = new NetworkCallRecord();
        Map<String, String> headers = new HashMap<>();
        captureRequestHeaders(context.getHttpRequest().getHeaders(), headers,
            X_MS_CLIENT_REQUEST_ID,
            CONTENT_TYPE,
            X_MS_VERSION,
            USER_AGENT);
        networkCallRecord.setHeaders(headers);
        networkCallRecord.setMethod(context.getHttpRequest().getHttpMethod().toString());
        // Scrub account-identifying host information and any SAS signature before persisting the URI.
        UrlBuilder urlBuilder = UrlBuilder.parse(context.getHttpRequest().getUrl());
        redactedAccountName(urlBuilder);
        if (urlBuilder.getQuery().containsKey(SIG)) {
            urlBuilder.setQueryParameter(SIG, "REDACTED");
        }
        // Trim a dangling '?' left behind when the query string ends up empty.
        networkCallRecord.setUri(urlBuilder.toString().replaceAll("\\?$", ""));
        return next.process()
            .doOnError(throwable -> {
                // Failures are recorded too, so playback reproduces the same error.
                networkCallRecord.setException(new NetworkCallError(throwable));
                recordedData.addNetworkCall(networkCallRecord);
                throw logger.logExceptionAsWarning(Exceptions.propagate(throwable));
            }).flatMap(httpResponse -> {
                final HttpResponse bufferedResponse = httpResponse.buffer();
                return extractResponseData(bufferedResponse).map(responseData -> {
                    networkCallRecord.setResponse(responseData);
                    String body = responseData.get(BODY);
                    // Skip intermediate results: in-progress polling responses and 302 redirects
                    // are not persisted. Note '&&' binds tighter than '||' here, so a 302 status
                    // alone also selects this branch.
                    if (body != null && body.contains("<Status>InProgress</Status>")
                        || Integer.parseInt(responseData.get(STATUS_CODE)) == HttpURLConnection.HTTP_MOVED_TEMP) {
                        logger.info("Waiting for a response or redirection.");
                    } else {
                        recordedData.addNetworkCall(networkCallRecord);
                    }
                    return bufferedResponse;
                });
            });
    }
private void captureRequestHeaders(HttpHeaders requestHeaders, Map<String, String> captureHeaders,
String... headerNames) {
for (String headerName : headerNames) {
if (requestHeaders.getValue(headerName) != null) {
captureHeaders.put(headerName, requestHeaders.getValue(headerName));
}
}
}
private Mono<Map<String, String>> extractResponseData(final HttpResponse response) {
final Map<String, String> responseData = new HashMap<>();
responseData.put(STATUS_CODE, Integer.toString(response.getStatusCode()));
boolean addedRetryAfter = false;
for (HttpHeader header : response.getHeaders()) {
String headerValueToStore = header.getValue();
if (header.getName().equalsIgnoreCase("retry-after")) {
headerValueToStore = "0";
addedRetryAfter = true;
} else if (header.getName().equalsIgnoreCase(X_MS_ENCRYPTION_KEY_SHA256)) {
headerValueToStore = "REDACTED";
}
responseData.put(header.getName(), headerValueToStore);
}
if (!addedRetryAfter) {
responseData.put("retry-after", "0");
}
String contentType = response.getHeaderValue(CONTENT_TYPE);
if (contentType == null) {
return response.getBodyAsByteArray().switchIfEmpty(Mono.just(new byte[0])).map(bytes -> {
if (bytes.length == 0) {
return responseData;
}
String content = new String(bytes, StandardCharsets.UTF_8);
responseData.put(CONTENT_LENGTH, Integer.toString(content.length()));
responseData.put(BODY, content);
return responseData;
});
} else if (contentType.equalsIgnoreCase("application/octet-stream")) {
return response.getBodyAsByteArray().switchIfEmpty(Mono.just(new byte[0])).map(bytes -> {
if (bytes.length == 0) {
return responseData;
}
responseData.put(BODY, Arrays.toString(bytes));
return responseData;
});
} else if (contentType.contains("json") || response.getHeaderValue(CONTENT_ENCODING) == null) {
return response.getBodyAsString(StandardCharsets.UTF_8).switchIfEmpty(Mono.just("")).map(content -> {
responseData.put(BODY, redactUserDelegationKey(content));
return responseData;
});
} else {
return response.getBodyAsByteArray().switchIfEmpty(Mono.just(new byte[0])).map(bytes -> {
if (bytes.length == 0) {
return responseData;
}
String content;
if ("gzip".equalsIgnoreCase(response.getHeaderValue(CONTENT_ENCODING))) {
try (GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(bytes));
ByteArrayOutputStream output = new ByteArrayOutputStream()) {
byte[] buffer = new byte[DEFAULT_BUFFER_LENGTH];
int position = 0;
int bytesRead = gis.read(buffer, position, buffer.length);
while (bytesRead != -1) {
output.write(buffer, 0, bytesRead);
position += bytesRead;
bytesRead = gis.read(buffer, position, buffer.length);
}
content = new String(output.toByteArray(), StandardCharsets.UTF_8);
} catch (IOException e) {
throw logger.logExceptionAsWarning(Exceptions.propagate(e));
}
} else {
content = new String(bytes, StandardCharsets.UTF_8);
}
responseData.remove(CONTENT_ENCODING);
responseData.put(CONTENT_LENGTH, Integer.toString(content.length()));
responseData.put(BODY, content);
return responseData;
});
}
}
    private String redactUserDelegationKey(String content) {
        // Only user-delegation-key responses carry secrets worth scrubbing.
        if (!content.contains("UserDelegationKey")) {
            return content;
        }
        // Replace the signing key with base64("REDACTED") and the client/tenant ids with random UUIDs.
        content = redactionReplacement(content, DELEGATIONKEY_KEY_PATTERN.matcher(content), Base64.getEncoder().encodeToString("REDACTED".getBytes(StandardCharsets.UTF_8)));
        content = redactionReplacement(content, DELEGATIONKEY_CLIENTID_PATTERN.matcher(content), UUID.randomUUID().toString());
        content = redactionReplacement(content, DELEGATIONKEY_TENANTID_PATTERN.matcher(content), UUID.randomUUID().toString());
        return content;
    }
    private String redactionReplacement(String content, Matcher matcher, String replacement) {
        // Replaces every occurrence of each captured group's text with the replacement value.
        // NOTE(review): the matcher iterates over the ORIGINAL content while 'content' is
        // reassigned inside the loop; this works because only the captured group text is
        // substituted, but confirm behavior if a pattern could match overlapping regions.
        while (matcher.find()) {
            content = content.replace(matcher.group(1), replacement);
        }
        return content;
    }
} | class RecordNetworkCallPolicy implements HttpPipelinePolicy {
    // Buffer size used when inflating gzip-encoded response bodies.
    private static final int DEFAULT_BUFFER_LENGTH = 1024;
    // Header names captured from requests and responses.
    private static final String CONTENT_TYPE = "Content-Type";
    private static final String CONTENT_ENCODING = "Content-Encoding";
    private static final String CONTENT_LENGTH = "Content-Length";
    private static final String X_MS_CLIENT_REQUEST_ID = "x-ms-client-request-id";
    private static final String X_MS_ENCRYPTION_KEY_SHA256 = "x-ms-encryption-key-sha256";
    private static final String X_MS_VERSION = "x-ms-version";
    private static final String USER_AGENT = "User-Agent";
    // Keys used in the persisted response-data map.
    private static final String STATUS_CODE = "StatusCode";
    private static final String BODY = "Body";
    // SAS signature query parameter; its value is redacted before the URI is recorded.
    private static final String SIG = "sig";
    // Patterns extracting the secret portions of a UserDelegationKey XML payload for redaction.
    private static final Pattern DELEGATIONKEY_KEY_PATTERN = Pattern.compile("(?:<Value>)(.*)(?:</Value>)");
    private static final Pattern DELEGATIONKEY_CLIENTID_PATTERN = Pattern.compile("(?:<SignedOid>)(.*)(?:</SignedOid>)");
    private static final Pattern DELEGATIONKEY_TENANTID_PATTERN = Pattern.compile("(?:<SignedTid>)(.*)(?:</SignedTid>)");
    private final ClientLogger logger = new ClientLogger(RecordNetworkCallPolicy.class);
    // Shared sink that accumulates the recorded network calls.
    private final RecordedData recordedData;
    /**
     * Creates a policy that records network calls into {@code recordedData}.
     *
     * @param recordedData The record to persist network calls into.
     */
    public RecordNetworkCallPolicy(RecordedData recordedData) {
        Objects.requireNonNull(recordedData, "'recordedData' cannot be null.");
        this.recordedData = recordedData;
    }
    @Override
    public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
        // Capture the outgoing request (selected headers, method, sanitized URI), forward it,
        // then record the response -- or the error -- so the call can be replayed later.
        final NetworkCallRecord networkCallRecord = new NetworkCallRecord();
        Map<String, String> headers = new HashMap<>();
        captureRequestHeaders(context.getHttpRequest().getHeaders(), headers,
            X_MS_CLIENT_REQUEST_ID,
            CONTENT_TYPE,
            X_MS_VERSION,
            USER_AGENT);
        networkCallRecord.setHeaders(headers);
        networkCallRecord.setMethod(context.getHttpRequest().getHttpMethod().toString());
        // Scrub account-identifying host information and any SAS signature before persisting the URI.
        UrlBuilder urlBuilder = UrlBuilder.parse(context.getHttpRequest().getUrl());
        redactedAccountName(urlBuilder);
        if (urlBuilder.getQuery().containsKey(SIG)) {
            urlBuilder.setQueryParameter(SIG, "REDACTED");
        }
        // Trim a dangling '?' left behind when the query string ends up empty.
        networkCallRecord.setUri(urlBuilder.toString().replaceAll("\\?$", ""));
        return next.process()
            .doOnError(throwable -> {
                // Failures are recorded too, so playback reproduces the same error.
                networkCallRecord.setException(new NetworkCallError(throwable));
                recordedData.addNetworkCall(networkCallRecord);
                throw logger.logExceptionAsWarning(Exceptions.propagate(throwable));
            }).flatMap(httpResponse -> {
                final HttpResponse bufferedResponse = httpResponse.buffer();
                return extractResponseData(bufferedResponse).map(responseData -> {
                    networkCallRecord.setResponse(responseData);
                    String body = responseData.get(BODY);
                    // Skip intermediate results: in-progress polling responses and 302 redirects
                    // are not persisted. Note '&&' binds tighter than '||' here, so a 302 status
                    // alone also selects this branch.
                    if (body != null && body.contains("<Status>InProgress</Status>")
                        || Integer.parseInt(responseData.get(STATUS_CODE)) == HttpURLConnection.HTTP_MOVED_TEMP) {
                        logger.info("Waiting for a response or redirection.");
                    } else {
                        recordedData.addNetworkCall(networkCallRecord);
                    }
                    return bufferedResponse;
                });
            });
    }
private void captureRequestHeaders(HttpHeaders requestHeaders, Map<String, String> captureHeaders,
String... headerNames) {
for (String headerName : headerNames) {
if (requestHeaders.getValue(headerName) != null) {
captureHeaders.put(headerName, requestHeaders.getValue(headerName));
}
}
}
private Mono<Map<String, String>> extractResponseData(final HttpResponse response) {
final Map<String, String> responseData = new HashMap<>();
responseData.put(STATUS_CODE, Integer.toString(response.getStatusCode()));
boolean addedRetryAfter = false;
for (HttpHeader header : response.getHeaders()) {
String headerValueToStore = header.getValue();
if (header.getName().equalsIgnoreCase("retry-after")) {
headerValueToStore = "0";
addedRetryAfter = true;
} else if (header.getName().equalsIgnoreCase(X_MS_ENCRYPTION_KEY_SHA256)) {
headerValueToStore = "REDACTED";
}
responseData.put(header.getName(), headerValueToStore);
}
if (!addedRetryAfter) {
responseData.put("retry-after", "0");
}
String contentType = response.getHeaderValue(CONTENT_TYPE);
if (contentType == null) {
return response.getBodyAsByteArray().switchIfEmpty(Mono.just(new byte[0])).map(bytes -> {
if (bytes.length == 0) {
return responseData;
}
String content = new String(bytes, StandardCharsets.UTF_8);
responseData.put(CONTENT_LENGTH, Integer.toString(content.length()));
responseData.put(BODY, content);
return responseData;
});
} else if (contentType.equalsIgnoreCase("application/octet-stream")) {
return response.getBodyAsByteArray().switchIfEmpty(Mono.just(new byte[0])).map(bytes -> {
if (bytes.length == 0) {
return responseData;
}
responseData.put(BODY, Arrays.toString(bytes));
return responseData;
});
} else if (contentType.contains("json") || response.getHeaderValue(CONTENT_ENCODING) == null) {
return response.getBodyAsString(StandardCharsets.UTF_8).switchIfEmpty(Mono.just("")).map(content -> {
responseData.put(BODY, redactUserDelegationKey(content));
return responseData;
});
} else {
return response.getBodyAsByteArray().switchIfEmpty(Mono.just(new byte[0])).map(bytes -> {
if (bytes.length == 0) {
return responseData;
}
String content;
if ("gzip".equalsIgnoreCase(response.getHeaderValue(CONTENT_ENCODING))) {
try (GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(bytes));
ByteArrayOutputStream output = new ByteArrayOutputStream()) {
byte[] buffer = new byte[DEFAULT_BUFFER_LENGTH];
int position = 0;
int bytesRead = gis.read(buffer, position, buffer.length);
while (bytesRead != -1) {
output.write(buffer, 0, bytesRead);
position += bytesRead;
bytesRead = gis.read(buffer, position, buffer.length);
}
content = new String(output.toByteArray(), StandardCharsets.UTF_8);
} catch (IOException e) {
throw logger.logExceptionAsWarning(Exceptions.propagate(e));
}
} else {
content = new String(bytes, StandardCharsets.UTF_8);
}
responseData.remove(CONTENT_ENCODING);
responseData.put(CONTENT_LENGTH, Integer.toString(content.length()));
responseData.put(BODY, content);
return responseData;
});
}
}
    private String redactUserDelegationKey(String content) {
        // Only user-delegation-key responses carry secrets worth scrubbing.
        if (!content.contains("UserDelegationKey")) {
            return content;
        }
        // Replace the signing key with base64("REDACTED") and the client/tenant ids with random UUIDs.
        content = redactionReplacement(content, DELEGATIONKEY_KEY_PATTERN.matcher(content), Base64.getEncoder().encodeToString("REDACTED".getBytes(StandardCharsets.UTF_8)));
        content = redactionReplacement(content, DELEGATIONKEY_CLIENTID_PATTERN.matcher(content), UUID.randomUUID().toString());
        content = redactionReplacement(content, DELEGATIONKEY_TENANTID_PATTERN.matcher(content), UUID.randomUUID().toString());
        return content;
    }
    private String redactionReplacement(String content, Matcher matcher, String replacement) {
        // Replaces every occurrence of each captured group's text with the replacement value.
        // NOTE(review): the matcher iterates over the ORIGINAL content while 'content' is
        // reassigned inside the loop; this works because only the captured group text is
        // substituted, but confirm behavior if a pattern could match overlapping regions.
        while (matcher.find()) {
            content = content.replace(matcher.group(1), replacement);
        }
        return content;
    }
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.