comment stringlengths 1 45k | method_body stringlengths 23 281k | target_code stringlengths 0 5.16k | method_body_after stringlengths 12 281k | context_before stringlengths 8 543k | context_after stringlengths 8 543k |
|---|---|---|---|---|---|
Could you have a test on setting request id? | public void canCreateAndListIndexerNames() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName);
indexer1.setName("a" + indexer1.getName());
SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName);
indexer2.setName("b" + indexer2.getName());
searchIndexerClient.createIndexer(indexer1);
indexersToDelete.add(indexer1.getName());
searchIndexerClient.createIndexer(indexer2);
indexersToDelete.add(indexer2.getName());
Iterator<String> indexersRes = searchIndexerClient.listIndexerNames(Context.NONE)
.iterator();
String actualIndexer = indexersRes.next();
assertNotNull(actualIndexer);
assertEquals(indexer1.getName(), actualIndexer);
actualIndexer = indexersRes.next();
assertNotNull(actualIndexer);
assertEquals(indexer2.getName(), actualIndexer);
assertFalse(indexersRes.hasNext());
} | Iterator<String> indexersRes = searchIndexerClient.listIndexerNames(Context.NONE) | public void canCreateAndListIndexerNames() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName);
mutateName(indexer1, "a" + indexer1.getName());
SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName);
mutateName(indexer2, "b" + indexer2.getName());
searchIndexerClient.createIndexer(indexer1);
indexersToDelete.add(indexer1.getName());
searchIndexerClient.createIndexer(indexer2);
indexersToDelete.add(indexer2.getName());
Iterator<String> indexersRes = searchIndexerClient.listIndexerNames(Context.NONE)
.iterator();
String actualIndexer = indexersRes.next();
assertNotNull(actualIndexer);
assertEquals(indexer1.getName(), actualIndexer);
actualIndexer = indexersRes.next();
assertNotNull(actualIndexer);
assertEquals(indexer2.getName(), actualIndexer);
assertFalse(indexersRes.hasNext());
} | class IndexersManagementSyncTests extends SearchTestBase {
private static final String TARGET_INDEX_NAME = "indexforindexers";
private static final HttpPipelinePolicy MOCK_STATUS_PIPELINE_POLICY =
new CustomQueryPipelinePolicy("mock_status", "inProgress");
private final List<String> dataSourcesToDelete = new ArrayList<>();
private final List<String> indexersToDelete = new ArrayList<>();
private final List<String> indexesToDelete = new ArrayList<>();
private final List<String> skillsetsToDelete = new ArrayList<>();
private SearchIndexerClient searchIndexerClient;
private SearchIndexClient searchIndexClient;
private String createDataSource() {
SearchIndexerDataSourceConnection dataSource = createTestSqlDataSourceObject();
searchIndexerClient.createOrUpdateDataSourceConnection(dataSource);
dataSourcesToDelete.add(dataSource.getName());
return dataSource.getName();
}
private String createIndex() {
SearchIndex index = createTestIndexForLiveDatasource();
searchIndexClient.createIndex(index);
indexesToDelete.add(index.getName());
return index.getName();
}
private SearchIndexer createTestDataSourceAndIndexer() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexer.getName());
return indexer;
}
/**
* Creates the index and indexer in the search service and then retrieves the indexer and validates it
*
* @param indexer the indexer to be created
*/
private void createAndValidateIndexer(SearchIndexer indexer) {
SearchIndexer indexerResponse = searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexerResponse.getName());
setSameStartTime(indexer, indexerResponse);
assertObjectEquals(indexer, indexerResponse, true, "etag");
}
@Override
protected void beforeTest() {
super.beforeTest();
searchIndexerClient = getSearchIndexerClientBuilder().buildClient();
searchIndexClient = getSearchIndexClientBuilder().buildClient();
}
@Override
protected void afterTest() {
super.afterTest();
for (String skillset : skillsetsToDelete) {
searchIndexerClient.deleteSkillset(skillset);
}
for (String dataSource : dataSourcesToDelete) {
searchIndexerClient.deleteDataSourceConnection(dataSource);
}
for (String indexer : indexersToDelete) {
searchIndexerClient.deleteIndexer(indexer);
}
for (String index : indexesToDelete) {
searchIndexClient.deleteIndex(index);
}
}
@Test
public void createIndexerReturnsCorrectDefinition() {
SearchIndexer expectedIndexer = createBaseTestIndexerObject(createIndex(), createDataSource())
.setIsDisabled(true)
.setParameters(new IndexingParameters()
.setBatchSize(50)
.setMaxFailedItems(10)
.setMaxFailedItemsPerBatch(10));
SearchIndexer actualIndexer = searchIndexerClient.createIndexer(expectedIndexer);
indexersToDelete.add(actualIndexer.getName());
expectedIndexer.setParameters(new IndexingParameters()
.setConfiguration(Collections.emptyMap()));
setSameStartTime(expectedIndexer, actualIndexer);
assertObjectEquals(expectedIndexer, actualIndexer, true, "etag");
}
@Test
public void canCreateAndListIndexers() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName);
indexer1.setName("a" + indexer1.getName());
SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName);
indexer2.setName("b" + indexer2.getName());
searchIndexerClient.createIndexer(indexer1);
indexersToDelete.add(indexer1.getName());
searchIndexerClient.createIndexer(indexer2);
indexersToDelete.add(indexer2.getName());
Iterator<SearchIndexer> indexers = searchIndexerClient.listIndexers().iterator();
SearchIndexer returnedIndexer = indexers.next();
assertObjectEquals(indexer1, returnedIndexer, true, "etag");
returnedIndexer = indexers.next();
assertObjectEquals(indexer2, returnedIndexer, true, "etag");
assertFalse(indexers.hasNext());
}
@Test
public void createIndexerFailsWithUsefulMessageOnUserError() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), "thisdatasourcedoesnotexist");
assertHttpResponseException(
() -> searchIndexerClient.createIndexer(indexer),
HttpURLConnection.HTTP_BAD_REQUEST,
"This indexer refers to a data source 'thisdatasourcedoesnotexist' that doesn't exist");
}
@Test
public void canResetIndexerAndGetIndexerStatus() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
searchIndexerClient.resetIndexer(indexer.getName());
SearchIndexerStatus indexerStatus = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(IndexerStatus.RUNNING, indexerStatus.getStatus());
assertEquals(IndexerExecutionStatus.RESET, indexerStatus.getLastResult().getStatus());
}
@Test
public void canResetIndexerAndGetIndexerStatusWithResponse() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
searchIndexerClient.resetIndexerWithResponse(indexer.getName(), Context.NONE);
SearchIndexerStatus indexerStatusResponse = searchIndexerClient.getIndexerStatusWithResponse(indexer.getName(),
Context.NONE).getValue();
assertEquals(IndexerStatus.RUNNING, indexerStatusResponse.getStatus());
assertEquals(IndexerExecutionStatus.RESET, indexerStatusResponse.getLastResult().getStatus());
}
@Test
public void canRunIndexer() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
searchIndexerClient.runIndexer(indexer.getName());
SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
}
@Test
public void canRunIndexerWithResponse() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
Response<Void> response = searchIndexerClient.runIndexerWithResponse(indexer.getName(), Context.NONE);
SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(HttpURLConnection.HTTP_ACCEPTED, response.getStatusCode());
assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
}
@Test
public void canRunIndexerAndGetIndexerStatus() {
searchIndexerClient = getSearchIndexerClientBuilder(MOCK_STATUS_PIPELINE_POLICY).buildClient();
searchIndexClient = getSearchIndexClientBuilder(MOCK_STATUS_PIPELINE_POLICY).buildClient();
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexer.getName());
SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
Response<Void> indexerRunResponse = searchIndexerClient.runIndexerWithResponse(indexer.getName(),
Context.NONE);
assertEquals(HttpResponseStatus.ACCEPTED.code(), indexerRunResponse.getStatusCode());
indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
assertValidSearchIndexerStatus(indexerExecutionInfo);
}
@Test
public void canUpdateIndexer() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithDifferentDescription(indexName, dataSourceName)
.setName(initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canUpdateIndexerFieldMapping() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithDifferentFieldMapping(indexName, dataSourceName)
.setName(initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithFieldMapping() {
SearchIndexer indexer = createIndexerWithDifferentFieldMapping(createIndex(), createDataSource());
createAndValidateIndexer(indexer);
}
@Test
public void canUpdateIndexerDisabled() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createDisabledIndexer(indexName, dataSourceName)
.setName(initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canUpdateIndexerSchedule() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithDifferentSchedule(indexName, dataSourceName)
.setName(initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithSchedule() {
SearchIndexer indexer = createIndexerWithDifferentSchedule(createIndex(), createDataSource());
createAndValidateIndexer(indexer);
}
@Test
public void canUpdateIndexerBatchSizeMaxFailedItems() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithDifferentIndexingParameters(initial);
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithBatchSizeMaxFailedItems() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer updatedExpected = createIndexerWithDifferentIndexingParameters(indexer);
createAndValidateIndexer(updatedExpected);
}
@Test
public void canUpdateIndexerBlobParams() {
String indexName = createIndex();
String dataSourceName = searchIndexerClient.createDataSourceConnection(createBlobDataSource()).getName();
dataSourcesToDelete.add(dataSourceName);
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithStorageConfig(indexName, dataSourceName)
.setName(initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithBlobParams() {
SearchIndexerDataSourceConnection blobDataSource = createBlobDataSource();
SearchIndexerDataSourceConnection dataSource = searchIndexerClient.createOrUpdateDataSourceConnection(blobDataSource);
dataSourcesToDelete.add(dataSource.getName());
SearchIndexer indexer = createIndexerWithStorageConfig(createIndex(), dataSource.getName());
createAndValidateIndexer(indexer);
}
@Test
public void canCreateAndDeleteIndexer() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
searchIndexerClient.deleteIndexer(indexer.getName());
assertThrows(HttpResponseException.class, () -> searchIndexerClient.getIndexer(indexer.getName()));
}
@Test
public void canCreateAndDeleteIndexerWithResponse() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexerWithResponse(indexer, Context.NONE);
searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
assertThrows(HttpResponseException.class, () -> searchIndexerClient.getIndexer(indexer.getName()));
}
@Test
public void deleteIndexerIsIdempotent() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
Response<Void> result = searchIndexerClient.deleteIndexerWithResponse(indexer, false,
Context.NONE);
assertEquals(HttpURLConnection.HTTP_NOT_FOUND, result.getStatusCode());
searchIndexerClient.createIndexer(indexer);
result = searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
assertEquals(HttpURLConnection.HTTP_NO_CONTENT, result.getStatusCode());
result = searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
assertEquals(HttpURLConnection.HTTP_NOT_FOUND, result.getStatusCode());
}
@Test
public void canCreateAndGetIndexer() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexer.getName());
SearchIndexer indexerResult = searchIndexerClient.getIndexer(indexer.getName());
assertObjectEquals(indexer, indexerResult, true, "etag");
indexerResult = searchIndexerClient.getIndexerWithResponse(indexer.getName(), Context.NONE)
.getValue();
assertObjectEquals(indexer, indexerResult, true, "etag");
}
@Test
public void getIndexerThrowsOnNotFound() {
assertHttpResponseException(
() -> searchIndexerClient.getIndexer("thisindexerdoesnotexist"),
HttpURLConnection.HTTP_NOT_FOUND,
"Indexer 'thisindexerdoesnotexist' was not found");
}
@Test
public void createOrUpdateIndexerIfNotExistsSucceedsOnNoResource() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer created = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, true, Context.NONE)
.getValue();
indexersToDelete.add(created.getName());
assertFalse(CoreUtils.isNullOrEmpty(created.getETag()));
}
@Test
public void deleteIndexerIfExistsWorksOnlyWhenResourceExists() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer created = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
.getValue();
searchIndexerClient.deleteIndexerWithResponse(created, true, Context.NONE);
try {
searchIndexerClient.deleteIndexerWithResponse(created, true, Context.NONE);
fail("deleteFunc should have failed due to non existent resource.");
} catch (HttpResponseException ex) {
assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
}
}
@Test
public void deleteIndexerIfNotChangedWorksOnlyOnCurrentResource() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer stale = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, true, Context.NONE)
.getValue();
SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(stale, false, Context.NONE)
.getValue();
try {
searchIndexerClient.deleteIndexerWithResponse(stale, true, Context.NONE);
fail("deleteFunc should have failed due to precondition.");
} catch (HttpResponseException ex) {
assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
}
searchIndexerClient.deleteIndexerWithResponse(updated, true, Context.NONE);
}
@Test
public void updateIndexerIfExistsSucceedsOnExistingResource() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
.getValue();
String originalETag = original.getETag();
indexersToDelete.add(original.getName());
SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
false, Context.NONE)
.getValue();
String updatedETag = updated.getETag();
assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
assertNotEquals(originalETag, updatedETag);
}
@Test
public void updateIndexerIfNotChangedFailsWhenResourceChanged() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
.getValue();
String originalETag = original.getETag();
indexersToDelete.add(original.getName());
SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
true, Context.NONE)
.getValue();
String updatedETag = updated.getETag();
try {
searchIndexerClient.createOrUpdateIndexerWithResponse(original, true, Context.NONE);
fail("createOrUpdateDefinition should have failed due to precondition.");
} catch (HttpResponseException ex) {
assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
}
assertFalse(CoreUtils.isNullOrEmpty(originalETag));
assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
assertNotEquals(originalETag, updatedETag);
}
@Test
public void updateIndexerIfNotChangedSucceedsWhenResourceUnchanged() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
.getValue();
String originalETag = original.getETag();
indexersToDelete.add(original.getName());
SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
true, Context.NONE)
.getValue();
String updatedETag = updated.getETag();
assertFalse(CoreUtils.isNullOrEmpty(originalETag));
assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
assertNotEquals(originalETag, updatedETag);
}
@Test
public void canUpdateIndexerSkillset() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexerSkillset skillset = createSkillsetObject();
searchIndexerClient.createSkillset(skillset);
skillsetsToDelete.add(skillset.getName());
SearchIndexer updated = createIndexerWithDifferentSkillset(indexName, dataSourceName, skillset.getName())
.setName(initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithSkillset() {
SearchIndexerSkillset skillset = searchIndexerClient.createSkillset(createSkillsetObject());
skillsetsToDelete.add(skillset.getName());
SearchIndexer indexer = createIndexerWithDifferentSkillset(createIndex(), createDataSource(), skillset.getName());
createAndValidateIndexer(indexer);
}
/**
* Create a new valid skillset object
*
* @return the newly created skillset object
*/
SearchIndexerSkillset createSkillsetObject() {
List<InputFieldMappingEntry> inputs = Arrays.asList(
new InputFieldMappingEntry()
.setName("url")
.setSource("/document/url"),
new InputFieldMappingEntry()
.setName("queryString")
.setSource("/document/queryString")
);
List<OutputFieldMappingEntry> outputs = Collections.singletonList(
new OutputFieldMappingEntry()
.setName("text")
.setTargetName("mytext")
);
List<SearchIndexerSkill> skills = Collections.singletonList(
new OcrSkill()
.setShouldDetectOrientation(true)
.setName("myocr")
.setDescription("Tested OCR skill")
.setContext("/document")
.setInputs(inputs)
.setOutputs(outputs)
);
return new SearchIndexerSkillset()
.setName(testResourceNamer.randomName("ocr-skillset", 32))
.setDescription("Skillset for testing default configuration")
.setSkills(skills);
}
SearchIndexer createBaseTestIndexerObject(String targetIndexName, String dataSourceName) {
return new SearchIndexer()
.setName(testResourceNamer.randomName("indexer", 32))
.setTargetIndexName(targetIndexName)
.setDataSourceName(dataSourceName)
.setSchedule(new IndexingSchedule().setInterval(Duration.ofDays(1)));
}
/**
* This index contains fields that are declared on the live data source we use to test the indexers
*
* @return the newly created Index object
*/
SearchIndex createTestIndexForLiveDatasource() {
return new SearchIndex()
.setName(testResourceNamer.randomName(IndexersManagementSyncTests.TARGET_INDEX_NAME, 32))
.setFields(Arrays.asList(
new SearchField()
.setName("county_name")
.setType(SearchFieldDataType.STRING)
.setSearchable(Boolean.FALSE)
.setFilterable(Boolean.TRUE),
new SearchField()
.setName("state")
.setType(SearchFieldDataType.STRING)
.setSearchable(Boolean.TRUE)
.setFilterable(Boolean.TRUE),
new SearchField()
.setName("feature_id")
.setType(SearchFieldDataType.STRING)
.setKey(Boolean.TRUE)
.setSearchable(Boolean.TRUE)
.setFilterable(Boolean.FALSE)));
}
/**
* Create a new indexer and change its description property
*
* @return the created indexer
*/
SearchIndexer createIndexerWithDifferentDescription(String targetIndexName, String dataSourceName) {
return createBaseTestIndexerObject(targetIndexName, dataSourceName)
.setDescription("somethingdifferent");
}
/**
* Create a new indexer and change its field mappings property
*
* @return the created indexer
*/
SearchIndexer createIndexerWithDifferentFieldMapping(String targetIndexName, String dataSourceName) {
SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
List<FieldMapping> fieldMappings = Collections.singletonList(new FieldMapping()
.setSourceFieldName("state_alpha")
.setTargetFieldName("state"));
indexer.setFieldMappings(fieldMappings);
return indexer;
}
/**
* Create a new indexer and set the Disabled property to true
*
* @return the created indexer
*/
SearchIndexer createDisabledIndexer(String targetIndexName, String dataSourceName) {
SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
indexer.setIsDisabled(true);
return indexer;
}
/**
* Create a new indexer and change its schedule property
*
* @return the created indexer
*/
SearchIndexer createIndexerWithDifferentSchedule(String targetIndexName, String dataSourceName) {
SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
IndexingSchedule is = new IndexingSchedule()
.setInterval(Duration.ofMinutes(10));
indexer.setSchedule(is);
return indexer;
}
/**
* Create a new indexer and change its skillset
*
* @return the created indexer
*/
SearchIndexer createIndexerWithDifferentSkillset(String targetIndexName, String dataSourceName, String skillsetName) {
return createBaseTestIndexerObject(targetIndexName, dataSourceName)
.setSkillsetName(skillsetName);
}
/**
* Create a new indexer and change its indexing parameters
*
* @return the created indexer
*/
SearchIndexer createIndexerWithDifferentIndexingParameters(SearchIndexer indexer) {
IndexingParameters ip = new IndexingParameters()
.setMaxFailedItems(121)
.setMaxFailedItemsPerBatch(11)
.setBatchSize(20);
indexer.setParameters(ip);
return indexer;
}
SearchIndexer createIndexerWithStorageConfig(String targetIndexName, String dataSourceName) {
SearchIndexer updatedExpected = createBaseTestIndexerObject(targetIndexName, dataSourceName);
HashMap<String, Object> config = new HashMap<>();
config.put("indexedFileNameExtensions", ".pdf,.docx");
config.put("excludedFileNameExtensions", ".xlsx");
config.put("dataToExtract", "storageMetadata");
config.put("failOnUnsupportedContentType", false);
IndexingParameters ip = new IndexingParameters()
.setConfiguration(config);
updatedExpected.setParameters(ip);
return updatedExpected;
}
void setSameStartTime(SearchIndexer expected, SearchIndexer actual) {
expected.getSchedule().setStartTime(actual.getSchedule().getStartTime());
}
void assertStartAndEndTimeValid(IndexerExecutionResult result) {
assertNotNull(result.getStartTime());
assertNotEquals(OffsetDateTime.now(), result.getStartTime());
assertNotNull(result.getEndTime());
assertNotEquals(OffsetDateTime.now(), result.getEndTime());
}
void assertValidSearchIndexerStatus(SearchIndexerStatus indexerExecutionInfo) {
assertEquals(IndexerExecutionStatus.IN_PROGRESS, indexerExecutionInfo.getLastResult().getStatus());
assertEquals(3, indexerExecutionInfo.getExecutionHistory().size());
SearchIndexerLimits limits = indexerExecutionInfo.getLimits();
assertNotNull(limits);
assertEquals(100000, limits.getMaxDocumentContentCharactersToExtract(), 0);
assertEquals(1000, limits.getMaxDocumentExtractionSize(), 0);
IndexerExecutionResult newestResult = indexerExecutionInfo.getExecutionHistory().get(0);
IndexerExecutionResult middleResult = indexerExecutionInfo.getExecutionHistory().get(1);
IndexerExecutionResult oldestResult = indexerExecutionInfo.getExecutionHistory().get(2);
assertEquals(IndexerExecutionStatus.TRANSIENT_FAILURE, newestResult.getStatus());
assertEquals("The indexer could not connect to the data source",
newestResult.getErrorMessage());
assertStartAndEndTimeValid(newestResult);
assertEquals(IndexerExecutionStatus.RESET, middleResult.getStatus());
assertStartAndEndTimeValid(middleResult);
assertEquals(IndexerExecutionStatus.SUCCESS, oldestResult.getStatus());
assertEquals(124876, oldestResult.getItemCount());
assertEquals(2, oldestResult.getFailedItemCount());
assertEquals("100", oldestResult.getInitialTrackingState());
assertEquals("200", oldestResult.getFinalTrackingState());
assertStartAndEndTimeValid(oldestResult);
assertEquals(2, oldestResult.getErrors().size());
assertEquals("1", oldestResult.getErrors().get(0).getKey());
assertEquals("Key field contains unsafe characters",
oldestResult.getErrors().get(0).getErrorMessage());
assertEquals("DocumentExtraction.AzureBlob.MyDataSource",
oldestResult.getErrors().get(0).getName());
assertEquals("The file could not be parsed.", oldestResult.getErrors().get(0).getDetails());
assertEquals("https:
oldestResult.getErrors().get(0).getDocumentationLink());
assertEquals("121713", oldestResult.getErrors().get(1).getKey());
assertEquals("Item is too large", oldestResult.getErrors().get(1).getErrorMessage());
assertEquals("DocumentExtraction.AzureBlob.DataReader",
oldestResult.getErrors().get(1).getName());
assertEquals("Blob size cannot exceed 256 MB.", oldestResult.getErrors().get(1).getDetails());
assertEquals("https:
oldestResult.getErrors().get(1).getDocumentationLink());
assertEquals(1, oldestResult.getWarnings().size());
assertEquals("2", oldestResult.getWarnings().get(0).getKey());
assertEquals("Document was truncated to 50000 characters.",
oldestResult.getWarnings().get(0).getMessage());
assertEquals("Enrichment.LanguageDetectionSkill.
oldestResult.getWarnings().get(0).getName());
assertEquals("Try to split the input into smaller chunks using Split skill.",
oldestResult.getWarnings().get(0).getDetails());
assertEquals("https:
oldestResult.getWarnings().get(0).getDocumentationLink());
}
} | class IndexersManagementSyncTests extends SearchTestBase {
private static final String TARGET_INDEX_NAME = "indexforindexers";
private static final HttpPipelinePolicy MOCK_STATUS_PIPELINE_POLICY =
new CustomQueryPipelinePolicy("mock_status", "inProgress");
private final List<String> dataSourcesToDelete = new ArrayList<>();
private final List<String> indexersToDelete = new ArrayList<>();
private final List<String> indexesToDelete = new ArrayList<>();
private final List<String> skillsetsToDelete = new ArrayList<>();
private SearchIndexerClient searchIndexerClient;
private SearchIndexClient searchIndexClient;
private String createDataSource() {
SearchIndexerDataSourceConnection dataSource = createTestSqlDataSourceObject();
searchIndexerClient.createOrUpdateDataSourceConnection(dataSource);
dataSourcesToDelete.add(dataSource.getName());
return dataSource.getName();
}
private String createIndex() {
SearchIndex index = createTestIndexForLiveDatasource();
searchIndexClient.createIndex(index);
indexesToDelete.add(index.getName());
return index.getName();
}
private SearchIndexer createTestDataSourceAndIndexer() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexer.getName());
return indexer;
}
/**
* Creates the index and indexer in the search service and then retrieves the indexer and validates it
*
* @param indexer the indexer to be created
*/
private void createAndValidateIndexer(SearchIndexer indexer) {
SearchIndexer indexerResponse = searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexerResponse.getName());
setSameStartTime(indexer, indexerResponse);
assertObjectEquals(indexer, indexerResponse, true, "etag");
}
@Override
protected void beforeTest() {
super.beforeTest();
searchIndexerClient = getSearchIndexerClientBuilder().buildClient();
searchIndexClient = getSearchIndexClientBuilder().buildClient();
}
@Override
protected void afterTest() {
super.afterTest();
for (String skillset : skillsetsToDelete) {
searchIndexerClient.deleteSkillset(skillset);
}
for (String dataSource : dataSourcesToDelete) {
searchIndexerClient.deleteDataSourceConnection(dataSource);
}
for (String indexer : indexersToDelete) {
searchIndexerClient.deleteIndexer(indexer);
}
for (String index : indexesToDelete) {
searchIndexClient.deleteIndex(index);
}
}
// Creating an indexer with explicit batching parameters returns the expected definition.
@Test
public void createIndexerReturnsCorrectDefinition() {
SearchIndexer expectedIndexer = createBaseTestIndexerObject(createIndex(), createDataSource())
.setIsDisabled(true)
.setParameters(new IndexingParameters()
.setBatchSize(50)
.setMaxFailedItems(10)
.setMaxFailedItemsPerBatch(10));
SearchIndexer actualIndexer = searchIndexerClient.createIndexer(expectedIndexer);
indexersToDelete.add(actualIndexer.getName());
// NOTE(review): the expected parameters are replaced with an empty configuration before
// comparing — presumably the service normalizes the configuration map; confirm.
expectedIndexer.setParameters(new IndexingParameters()
.setConfiguration(Collections.emptyMap()));
setSameStartTime(expectedIndexer, actualIndexer);
assertObjectEquals(expectedIndexer, actualIndexer, true, "etag");
}
// Two indexers prefixed "a"/"b" are created so the list order is deterministic.
@Test
public void canCreateAndListIndexers() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName);
mutateName(indexer1, "a" + indexer1.getName());
SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName);
mutateName(indexer2, "b" + indexer2.getName());
searchIndexerClient.createIndexer(indexer1);
indexersToDelete.add(indexer1.getName());
searchIndexerClient.createIndexer(indexer2);
indexersToDelete.add(indexer2.getName());
Iterator<SearchIndexer> indexers = searchIndexerClient.listIndexers().iterator();
SearchIndexer returnedIndexer = indexers.next();
assertObjectEquals(indexer1, returnedIndexer, true, "etag");
returnedIndexer = indexers.next();
assertObjectEquals(indexer2, returnedIndexer, true, "etag");
// Exactly the two indexers created above must be listed.
assertFalse(indexers.hasNext());
}
// Bug fix: '@Test' was declared twice on this method. java.lang annotations are not
// repeatable, so the duplicate annotation did not compile; the extra one is removed.
@Test
public void createIndexerFailsWithUsefulMessageOnUserError() {
    // Referencing a non-existent data source must surface a descriptive 400 error.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), "thisdatasourcedoesnotexist");
    assertHttpResponseException(
        () -> searchIndexerClient.createIndexer(indexer),
        HttpURLConnection.HTTP_BAD_REQUEST,
        "This indexer refers to a data source 'thisdatasourcedoesnotexist' that doesn't exist");
}
// After a reset, the indexer reports RUNNING overall with a RESET last result.
@Test
public void canResetIndexerAndGetIndexerStatus() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
searchIndexerClient.resetIndexer(indexer.getName());
SearchIndexerStatus indexerStatus = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(IndexerStatus.RUNNING, indexerStatus.getStatus());
assertEquals(IndexerExecutionStatus.RESET, indexerStatus.getLastResult().getStatus());
}
// Same as canResetIndexerAndGetIndexerStatus, exercising the *WithResponse overloads.
@Test
public void canResetIndexerAndGetIndexerStatusWithResponse() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
searchIndexerClient.resetIndexerWithResponse(indexer.getName(), Context.NONE);
SearchIndexerStatus indexerStatusResponse = searchIndexerClient.getIndexerStatusWithResponse(indexer.getName(),
Context.NONE).getValue();
assertEquals(IndexerStatus.RUNNING, indexerStatusResponse.getStatus());
assertEquals(IndexerExecutionStatus.RESET, indexerStatusResponse.getLastResult().getStatus());
}
// Triggering a run leaves the indexer in the RUNNING state.
@Test
public void canRunIndexer() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
searchIndexerClient.runIndexer(indexer.getName());
SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
}
// runIndexerWithResponse must return 202 Accepted and leave the indexer RUNNING.
@Test
public void canRunIndexerWithResponse() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
Response<Void> response = searchIndexerClient.runIndexerWithResponse(indexer.getName(), Context.NONE);
SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(HttpURLConnection.HTTP_ACCEPTED, response.getStatusCode());
assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
}
// Rebuilds the clients with a pipeline policy that appends a "mock_status=inProgress"
// query parameter so status calls return a canned in-progress payload.
@Test
public void canRunIndexerAndGetIndexerStatus() {
searchIndexerClient = getSearchIndexerClientBuilder(MOCK_STATUS_PIPELINE_POLICY).buildClient();
searchIndexClient = getSearchIndexClientBuilder().addPolicy(MOCK_STATUS_PIPELINE_POLICY).buildClient();
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexer.getName());
SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
Response<Void> indexerRunResponse = searchIndexerClient.runIndexerWithResponse(indexer.getName(),
Context.NONE);
assertEquals(HttpResponseStatus.ACCEPTED.code(), indexerRunResponse.getStatusCode());
indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
// Deep validation of the mocked status payload (history, limits, errors, warnings).
assertValidSearchIndexerStatus(indexerExecutionInfo);
}
// createOrUpdateIndexer replaces an existing indexer's definition (description variant).
@Test
public void canUpdateIndexer() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithDifferentDescription(indexName, dataSourceName);
// Reuse the original name so the call updates rather than creates.
mutateName(updated, initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
// createOrUpdateIndexer replaces an existing indexer's field mappings.
@Test
public void canUpdateIndexerFieldMapping() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithDifferentFieldMapping(indexName, dataSourceName);
// Reuse the original name so the call updates rather than creates.
mutateName(updated, initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithFieldMapping() {
    // An indexer carrying an explicit field mapping round-trips through creation unchanged.
    createAndValidateIndexer(createIndexerWithDifferentFieldMapping(createIndex(), createDataSource()));
}
// createOrUpdateIndexer can flip an indexer's disabled flag.
@Test
public void canUpdateIndexerDisabled() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createDisabledIndexer(indexName, dataSourceName);
// Reuse the original name so the call updates rather than creates.
mutateName(updated, initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
// createOrUpdateIndexer replaces an existing indexer's schedule.
@Test
public void canUpdateIndexerSchedule() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithDifferentSchedule(indexName, dataSourceName);
// Reuse the original name so the call updates rather than creates.
mutateName(updated, initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithSchedule() {
    // An indexer with a non-default schedule round-trips through creation unchanged.
    createAndValidateIndexer(createIndexerWithDifferentSchedule(createIndex(), createDataSource()));
}
// createOrUpdateIndexer replaces batching/failure-tolerance parameters.
@Test
public void canUpdateIndexerBatchSizeMaxFailedItems() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
// Note: mutates 'initial' in place, so the name already matches the created indexer.
SearchIndexer updated = createIndexerWithDifferentIndexingParameters(initial);
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
// An indexer with explicit batching parameters round-trips through creation unchanged.
@Test
public void canCreateIndexerWithBatchSizeMaxFailedItems() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer updatedExpected = createIndexerWithDifferentIndexingParameters(indexer);
createAndValidateIndexer(updatedExpected);
}
// createOrUpdateIndexer replaces blob-storage-specific indexing configuration.
@Test
public void canUpdateIndexerBlobParams() {
String indexName = createIndex();
String dataSourceName = searchIndexerClient.createDataSourceConnection(createBlobDataSource()).getName();
dataSourcesToDelete.add(dataSourceName);
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexer updated = createIndexerWithStorageConfig(indexName, dataSourceName);
// Reuse the original name so the call updates rather than creates.
mutateName(updated, initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
// An indexer with blob-storage configuration round-trips through creation unchanged.
@Test
public void canCreateIndexerWithBlobParams() {
SearchIndexerDataSourceConnection blobDataSource = createBlobDataSource();
SearchIndexerDataSourceConnection dataSource = searchIndexerClient.createOrUpdateDataSourceConnection(blobDataSource);
dataSourcesToDelete.add(dataSource.getName());
SearchIndexer indexer = createIndexerWithStorageConfig(createIndex(), dataSource.getName());
createAndValidateIndexer(indexer);
}
// After deletion, fetching the indexer must fail with an HttpResponseException.
@Test
public void canCreateAndDeleteIndexer() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
searchIndexerClient.deleteIndexer(indexer.getName());
assertThrows(HttpResponseException.class, () -> searchIndexerClient.getIndexer(indexer.getName()));
}
// Same as canCreateAndDeleteIndexer, exercising the *WithResponse overloads.
@Test
public void canCreateAndDeleteIndexerWithResponse() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexerWithResponse(indexer, Context.NONE);
searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
assertThrows(HttpResponseException.class, () -> searchIndexerClient.getIndexer(indexer.getName()));
}
// Deleting a missing indexer returns 404; a successful delete returns 204.
@Test
public void deleteIndexerIsIdempotent() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
// Delete before creation: nothing to remove yet.
Response<Void> result = searchIndexerClient.deleteIndexerWithResponse(indexer, false,
Context.NONE);
assertEquals(HttpURLConnection.HTTP_NOT_FOUND, result.getStatusCode());
searchIndexerClient.createIndexer(indexer);
result = searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
assertEquals(HttpURLConnection.HTTP_NO_CONTENT, result.getStatusCode());
// Second delete of the same resource is again a 404, not an error.
result = searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
assertEquals(HttpURLConnection.HTTP_NOT_FOUND, result.getStatusCode());
}
// getIndexer and getIndexerWithResponse both return the created definition.
@Test
public void canCreateAndGetIndexer() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexer.getName());
SearchIndexer indexerResult = searchIndexerClient.getIndexer(indexer.getName());
assertObjectEquals(indexer, indexerResult, true, "etag");
indexerResult = searchIndexerClient.getIndexerWithResponse(indexer.getName(), Context.NONE)
.getValue();
assertObjectEquals(indexer, indexerResult, true, "etag");
}
// Fetching a non-existent indexer yields a 404 with a descriptive message.
@Test
public void getIndexerThrowsOnNotFound() {
assertHttpResponseException(
() -> searchIndexerClient.getIndexer("thisindexerdoesnotexist"),
HttpURLConnection.HTTP_NOT_FOUND,
"Indexer 'thisindexerdoesnotexist' was not found");
}
// With onlyIfUnchanged=true and no existing resource, createOrUpdate creates and returns an eTag.
@Test
public void createOrUpdateIndexerIfNotExistsSucceedsOnNoResource() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer created = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, true, Context.NONE)
.getValue();
indexersToDelete.add(created.getName());
assertFalse(CoreUtils.isNullOrEmpty(created.getETag()));
}
// Conditional delete (onlyIfUnchanged=true) succeeds while the resource exists and
// fails with 412 once it is gone.
@Test
public void deleteIndexerIfExistsWorksOnlyWhenResourceExists() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer created = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    // First conditional delete succeeds: the resource exists with a matching eTag.
    searchIndexerClient.deleteIndexerWithResponse(created, true, Context.NONE);
    // Consistency: use assertThrows (as the rest of this suite does, e.g.
    // canCreateAndDeleteIndexer) instead of the try/fail/catch pattern.
    HttpResponseException ex = assertThrows(HttpResponseException.class,
        () -> searchIndexerClient.deleteIndexerWithResponse(created, true, Context.NONE));
    assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
}
// Conditional delete with a stale eTag is rejected (412); the current eTag still works.
@Test
public void deleteIndexerIfNotChangedWorksOnlyOnCurrentResource() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer stale = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, true, Context.NONE)
        .getValue();
    // A second update bumps the service-side eTag, making 'stale' out of date.
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(stale, false, Context.NONE)
        .getValue();
    // Consistency: use assertThrows (matching the rest of this suite) rather than try/fail/catch.
    HttpResponseException ex = assertThrows(HttpResponseException.class,
        () -> searchIndexerClient.deleteIndexerWithResponse(stale, true, Context.NONE));
    assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
    // Deleting with the current eTag succeeds.
    searchIndexerClient.deleteIndexerWithResponse(updated, true, Context.NONE);
}
// Updating an existing resource produces a new, non-empty eTag.
@Test
public void updateIndexerIfExistsSucceedsOnExistingResource() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
.getValue();
String originalETag = original.getETag();
indexersToDelete.add(original.getName());
SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
false, Context.NONE)
.getValue();
String updatedETag = updated.getETag();
assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
assertNotEquals(originalETag, updatedETag);
}
// Conditional update (onlyIfUnchanged=true) with a stale eTag is rejected with 412.
@Test
public void updateIndexerIfNotChangedFailsWhenResourceChanged() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    String originalETag = original.getETag();
    indexersToDelete.add(original.getName());
    // This update bumps the service-side eTag, making 'original' stale.
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(
        original.setDescription("ABrandNewDescription"), true, Context.NONE)
        .getValue();
    String updatedETag = updated.getETag();
    // Consistency: use assertThrows (matching the rest of this suite) rather than try/fail/catch.
    HttpResponseException ex = assertThrows(HttpResponseException.class,
        () -> searchIndexerClient.createOrUpdateIndexerWithResponse(original, true, Context.NONE));
    assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
    assertFalse(CoreUtils.isNullOrEmpty(originalETag));
    assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
    assertNotEquals(originalETag, updatedETag);
}
// Conditional update with the current eTag succeeds and yields a fresh eTag.
@Test
public void updateIndexerIfNotChangedSucceedsWhenResourceUnchanged() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
.getValue();
String originalETag = original.getETag();
indexersToDelete.add(original.getName());
SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
true, Context.NONE)
.getValue();
String updatedETag = updated.getETag();
assertFalse(CoreUtils.isNullOrEmpty(originalETag));
assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
assertNotEquals(originalETag, updatedETag);
}
// createOrUpdateIndexer can attach a skillset to an existing indexer.
@Test
public void canUpdateIndexerSkillset() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
searchIndexerClient.createIndexer(initial);
indexersToDelete.add(initial.getName());
SearchIndexerSkillset skillset = createSkillsetObject();
searchIndexerClient.createSkillset(skillset);
skillsetsToDelete.add(skillset.getName());
SearchIndexer updated = createIndexerWithDifferentSkillset(indexName, dataSourceName, skillset.getName());
// Reuse the original name so the call updates rather than creates.
mutateName(updated, initial.getName());
SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
setSameStartTime(updated, indexerResponse);
assertObjectEquals(updated, indexerResponse, true, "etag");
}
// An indexer referencing a skillset round-trips through creation unchanged.
@Test
public void canCreateIndexerWithSkillset() {
SearchIndexerSkillset skillset = searchIndexerClient.createSkillset(createSkillsetObject());
skillsetsToDelete.add(skillset.getName());
SearchIndexer indexer = createIndexerWithDifferentSkillset(createIndex(), createDataSource(), skillset.getName());
createAndValidateIndexer(indexer);
}
// Rewrites an indexer's name via reflection — presumably SearchIndexer exposes no
// public name setter; confirm against the model class.
// NOTE(review): getDeclaredField only finds "name" if it is declared directly on the
// runtime class, not on a superclass — TODO confirm where the field lives.
void mutateName(SearchIndexer updateIndexer, String indexerName) {
try {
Field updateField = updateIndexer.getClass().getDeclaredField("name");
updateField.setAccessible(true);
updateField.set(updateIndexer, indexerName);
} catch (Exception e) {
// Rethrow reflection failures unchecked so test methods need no throws clause.
throw Exceptions.propagate(e);
}
}
/**
 * Create a new valid skillset object: a single OCR skill mapping document
 * url/queryString inputs to a "mytext" output, under a randomized name.
 *
 * @return the newly created skillset object
 */
SearchIndexerSkillset createSkillsetObject() {
List<InputFieldMappingEntry> inputs = Arrays.asList(
new InputFieldMappingEntry("url")
.setSource("/document/url"),
new InputFieldMappingEntry("queryString")
.setSource("/document/queryString")
);
List<OutputFieldMappingEntry> outputs = Collections.singletonList(
new OutputFieldMappingEntry("text")
.setTargetName("mytext")
);
List<SearchIndexerSkill> skills = Collections.singletonList(
new OcrSkill(inputs, outputs)
.setShouldDetectOrientation(true)
.setName("myocr")
.setDescription("Tested OCR skill")
.setContext("/document")
);
return new SearchIndexerSkillset(testResourceNamer.randomName("ocr-skillset", 32))
.setDescription("Skillset for testing default configuration")
.setSkills(skills);
}
// Builds a randomized-name indexer over the given data source/index with a daily schedule.
SearchIndexer createBaseTestIndexerObject(String targetIndexName, String dataSourceName) {
return new SearchIndexer(testResourceNamer.randomName("indexer", 32), dataSourceName, targetIndexName)
.setSchedule(new IndexingSchedule(Duration.ofDays(1)));
}
/**
 * This index contains fields that are declared on the live data source we use to test
 * the indexers: county_name, state, and the key field feature_id.
 *
 * @return the newly created Index object
 */
SearchIndex createTestIndexForLiveDatasource() {
return new SearchIndex(testResourceNamer.randomName(IndexersManagementSyncTests.TARGET_INDEX_NAME, 32))
.setFields(Arrays.asList(
new SearchField("county_name", SearchFieldDataType.STRING)
.setSearchable(Boolean.FALSE)
.setFilterable(Boolean.TRUE),
new SearchField("state", SearchFieldDataType.STRING)
.setSearchable(Boolean.TRUE)
.setFilterable(Boolean.TRUE),
new SearchField("feature_id", SearchFieldDataType.STRING)
.setKey(Boolean.TRUE)
.setSearchable(Boolean.TRUE)
.setFilterable(Boolean.FALSE)));
}
/**
 * Create a new indexer and change its description property.
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentDescription(String targetIndexName, String dataSourceName) {
return createBaseTestIndexerObject(targetIndexName, dataSourceName)
.setDescription("somethingdifferent");
}
/**
 * Builds the base test indexer with a single field mapping ("state_alpha" -&gt; "state").
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentFieldMapping(String targetIndexName, String dataSourceName) {
    SearchIndexer result = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    result.setFieldMappings(Collections.singletonList(
        new FieldMapping("state_alpha").setTargetFieldName("state")));
    return result;
}
/**
 * Create a new indexer with the disabled flag explicitly set.
 * <p>
 * NOTE(review): despite the method name, this sets {@code isDisabled} to {@code false}
 * (the previous javadoc incorrectly said {@code true}); callers use it as a variant
 * differing from an initially disabled indexer — confirm intent before renaming.
 *
 * @return the created indexer
 */
SearchIndexer createDisabledIndexer(String targetIndexName, String dataSourceName) {
SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
indexer.setIsDisabled(false);
return indexer;
}
/**
 * Builds the base test indexer with a 10-minute schedule instead of the daily default.
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentSchedule(String targetIndexName, String dataSourceName) {
    SearchIndexer result = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    result.setSchedule(new IndexingSchedule(Duration.ofMinutes(10)));
    return result;
}
/**
 * Create a new indexer and change its skillset.
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentSkillset(String targetIndexName, String dataSourceName, String skillsetName) {
return createBaseTestIndexerObject(targetIndexName, dataSourceName)
.setSkillsetName(skillsetName);
}
/**
 * Create a new indexer and change its indexing parameters.
 * Note: mutates and returns the SAME instance that was passed in.
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentIndexingParameters(SearchIndexer indexer) {
IndexingParameters ip = new IndexingParameters()
.setMaxFailedItems(121)
.setMaxFailedItemsPerBatch(11)
.setBatchSize(20);
indexer.setParameters(ip);
return indexer;
}
// Builds the base test indexer with blob-storage-specific configuration entries
// (file extension filters, metadata-only extraction, unsupported-content tolerance).
SearchIndexer createIndexerWithStorageConfig(String targetIndexName, String dataSourceName) {
SearchIndexer updatedExpected = createBaseTestIndexerObject(targetIndexName, dataSourceName);
HashMap<String, Object> config = new HashMap<>();
config.put("indexedFileNameExtensions", ".pdf,.docx");
config.put("excludedFileNameExtensions", ".xlsx");
config.put("dataToExtract", "storageMetadata");
config.put("failOnUnsupportedContentType", false);
IndexingParameters ip = new IndexingParameters()
.setConfiguration(config);
updatedExpected.setParameters(ip);
return updatedExpected;
}
// Copies the service-assigned schedule start time onto the expected object so
// deep equality checks don't fail on the start time alone.
void setSameStartTime(SearchIndexer expected, SearchIndexer actual) {
expected.getSchedule().setStartTime(actual.getSchedule().getStartTime());
}
// Asserts an execution result has non-null start/end times that are not "now".
void assertStartAndEndTimeValid(IndexerExecutionResult result) {
assertNotNull(result.getStartTime());
assertNotEquals(OffsetDateTime.now(), result.getStartTime());
assertNotNull(result.getEndTime());
assertNotEquals(OffsetDateTime.now(), result.getEndTime());
}
// Deep validation of the mocked indexer status payload: limits, a three-entry
// execution history (transient failure, reset, success), plus its errors/warnings.
// NOTE(review): several string literals below (e.g. "https:) appear truncated by text
// extraction — restore the full URLs/messages from the recorded test session.
void assertValidSearchIndexerStatus(SearchIndexerStatus indexerExecutionInfo) {
assertEquals(IndexerExecutionStatus.IN_PROGRESS, indexerExecutionInfo.getLastResult().getStatus());
assertEquals(3, indexerExecutionInfo.getExecutionHistory().size());
SearchIndexerLimits limits = indexerExecutionInfo.getLimits();
assertNotNull(limits);
assertEquals(100000, limits.getMaxDocumentContentCharactersToExtract(), 0);
assertEquals(1000, limits.getMaxDocumentExtractionSize(), 0);
// History is newest-first.
IndexerExecutionResult newestResult = indexerExecutionInfo.getExecutionHistory().get(0);
IndexerExecutionResult middleResult = indexerExecutionInfo.getExecutionHistory().get(1);
IndexerExecutionResult oldestResult = indexerExecutionInfo.getExecutionHistory().get(2);
assertEquals(IndexerExecutionStatus.TRANSIENT_FAILURE, newestResult.getStatus());
assertEquals("The indexer could not connect to the data source",
newestResult.getErrorMessage());
assertStartAndEndTimeValid(newestResult);
assertEquals(IndexerExecutionStatus.RESET, middleResult.getStatus());
assertStartAndEndTimeValid(middleResult);
assertEquals(IndexerExecutionStatus.SUCCESS, oldestResult.getStatus());
assertEquals(124876, oldestResult.getItemCount());
assertEquals(2, oldestResult.getFailedItemCount());
assertEquals("100", oldestResult.getInitialTrackingState());
assertEquals("200", oldestResult.getFinalTrackingState());
assertStartAndEndTimeValid(oldestResult);
assertEquals(2, oldestResult.getErrors().size());
assertEquals("1", oldestResult.getErrors().get(0).getKey());
assertEquals("Key field contains unsafe characters",
oldestResult.getErrors().get(0).getErrorMessage());
assertEquals("DocumentExtraction.AzureBlob.MyDataSource",
oldestResult.getErrors().get(0).getName());
assertEquals("The file could not be parsed.", oldestResult.getErrors().get(0).getDetails());
assertEquals("https:
oldestResult.getErrors().get(0).getDocumentationLink());
assertEquals("121713", oldestResult.getErrors().get(1).getKey());
assertEquals("Item is too large", oldestResult.getErrors().get(1).getErrorMessage());
assertEquals("DocumentExtraction.AzureBlob.DataReader",
oldestResult.getErrors().get(1).getName());
assertEquals("Blob size cannot exceed 256 MB.", oldestResult.getErrors().get(1).getDetails());
assertEquals("https:
oldestResult.getErrors().get(1).getDocumentationLink());
assertEquals(1, oldestResult.getWarnings().size());
assertEquals("2", oldestResult.getWarnings().get(0).getKey());
assertEquals("Document was truncated to 50000 characters.",
oldestResult.getWarnings().get(0).getMessage());
assertEquals("Enrichment.LanguageDetectionSkill.
oldestResult.getWarnings().get(0).getName());
assertEquals("Try to split the input into smaller chunks using Split skill.",
oldestResult.getWarnings().get(0).getDetails());
assertEquals("https:
oldestResult.getWarnings().get(0).getDocumentationLink());
}
} |
I'll add an example of how to add a per-request `x-ms-client-request-id` in the next commit. | private static void autoCompleteWithOneTermContext(SearchClient searchClient) {
// Autocomplete "coffee m" against suggester "sg" in ONE_TERM_WITH_CONTEXT mode.
AutocompleteOptions params = new AutocompleteOptions().setAutocompleteMode(
AutocompleteMode.ONE_TERM_WITH_CONTEXT);
PagedIterableBase<AutocompleteItem, AutocompletePagedResponse> results = searchClient.autocomplete("coffee m",
"sg", params, Context.NONE);
System.out.println("Received results with one term context:");
results.forEach(result -> System.out.println(result.getText()));
/* Output:
* Received results with one term context:
* coffee maker
*/
} | PagedIterableBase<AutocompleteItem, AutocompletePagedResponse> results = searchClient.autocomplete("coffee m", | private static void autoCompleteWithOneTermContext(SearchClient searchClient) {
// Autocomplete "coffee m" against suggester "sg" in ONE_TERM_WITH_CONTEXT mode.
AutocompleteOptions params = new AutocompleteOptions().setAutocompleteMode(
AutocompleteMode.ONE_TERM_WITH_CONTEXT);
PagedIterableBase<AutocompleteItem, AutocompletePagedResponse> results = searchClient.autocomplete("coffee m",
"sg", params, Context.NONE);
System.out.println("Received results with one term context:");
results.forEach(result -> System.out.println(result.getText()));
/* Output:
* Received results with one term context:
* coffee maker
*/
} | class AutoCompleteExample {
/**
 * From the Azure portal, get your Azure Cognitive Search service URL and API key,
 * and set the values of these environment variables:
 */
private static final String ENDPOINT = Configuration.getGlobalConfiguration().get("AZURE_COGNITIVE_SEARCH_ENDPOINT");
private static final String API_KEY = Configuration.getGlobalConfiguration().get("AZURE_COGNITIVE_SEARCH_API_KEY");
// Builds a client against the hotels sample index and runs each autocomplete demo.
public static void main(String[] args) {
SearchClient searchClient = new SearchClientBuilder()
.endpoint(ENDPOINT)
.credential(new AzureKeyCredential(API_KEY))
.indexName("hotels-sample-index")
.buildClient();
autoCompleteWithOneTermContext(searchClient);
autoCompleteWithHighlighting(searchClient);
autoCompleteWithFilterAndFuzzy(searchClient);
}
// Autocomplete "co" with a city filter and <b>...</b> hit highlighting.
private static void autoCompleteWithHighlighting(SearchClient searchClient) {
AutocompleteOptions params = new AutocompleteOptions()
.setAutocompleteMode(AutocompleteMode.ONE_TERM)
.setFilter("Address/City eq 'San Diego' or Address/City eq 'Hartford'")
.setHighlightPreTag("<b>")
.setHighlightPostTag("</b>");
PagedIterableBase<AutocompleteItem, AutocompletePagedResponse> results = searchClient.autocomplete("co", "sg", params,
Context.NONE);
System.out.println("Received results with highlighting:");
results.forEach(result -> System.out.println(result.getText()));
/* Output:
* Received results with highlighting:
* coffee
*/
}
// Autocomplete "su" with fuzzy matching enabled and an id/category filter.
private static void autoCompleteWithFilterAndFuzzy(SearchClient searchClient) {
AutocompleteOptions params = new AutocompleteOptions()
.setAutocompleteMode(AutocompleteMode.ONE_TERM)
.setUseFuzzyMatching(true)
.setFilter("HotelId ne '6' and Category eq 'Budget'");
PagedIterableBase<AutocompleteItem, AutocompletePagedResponse> results = searchClient.autocomplete("su", "sg", params,
Context.NONE);
System.out.println("Received results with filter and fuzzy:");
results.forEach(result -> System.out.println(result.getText()));
/* Output:
* Received results with filter and fuzzy:
* suite
*/
}
} | class AutoCompleteExample {
/**
 * From the Azure portal, get your Azure Cognitive Search service URL and API key,
 * and set the values of these environment variables:
 */
private static final String ENDPOINT = Configuration.getGlobalConfiguration().get("AZURE_COGNITIVE_SEARCH_ENDPOINT");
private static final String API_KEY = Configuration.getGlobalConfiguration().get("AZURE_COGNITIVE_SEARCH_API_KEY");
// Builds a client against the hotels sample index and runs each autocomplete demo.
public static void main(String[] args) {
SearchClient searchClient = new SearchClientBuilder()
.endpoint(ENDPOINT)
.credential(new AzureKeyCredential(API_KEY))
.indexName("hotels-sample-index")
.buildClient();
autoCompleteWithOneTermContext(searchClient);
autoCompleteWithHighlighting(searchClient);
autoCompleteWithFilterAndFuzzy(searchClient);
}
// Autocomplete "co" with a city filter and <b>...</b> hit highlighting.
private static void autoCompleteWithHighlighting(SearchClient searchClient) {
AutocompleteOptions params = new AutocompleteOptions()
.setAutocompleteMode(AutocompleteMode.ONE_TERM)
.setFilter("Address/City eq 'San Diego' or Address/City eq 'Hartford'")
.setHighlightPreTag("<b>")
.setHighlightPostTag("</b>");
PagedIterableBase<AutocompleteItem, AutocompletePagedResponse> results = searchClient.autocomplete("co", "sg", params,
Context.NONE);
System.out.println("Received results with highlighting:");
results.forEach(result -> System.out.println(result.getText()));
/* Output:
* Received results with highlighting:
* coffee
*/
}
// Autocomplete "su" with fuzzy matching enabled and an id/category filter.
private static void autoCompleteWithFilterAndFuzzy(SearchClient searchClient) {
AutocompleteOptions params = new AutocompleteOptions()
.setAutocompleteMode(AutocompleteMode.ONE_TERM)
.setUseFuzzyMatching(true)
.setFilter("HotelId ne '6' and Category eq 'Budget'");
PagedIterableBase<AutocompleteItem, AutocompletePagedResponse> results = searchClient.autocomplete("su", "sg", params,
Context.NONE);
System.out.println("Received results with filter and fuzzy:");
results.forEach(result -> System.out.println(result.getText()));
/* Output:
* Received results with filter and fuzzy:
* suite
*/
}
} |
Will add a few tests. As a heads up, we never validated that the `x-ms-client-request-id` returned from the service matches what was set in `RequestOptions`, so this will be even more important to do. | public void canCreateAndListIndexerNames() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName);
indexer1.setName("a" + indexer1.getName());
SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName);
indexer2.setName("b" + indexer2.getName());
searchIndexerClient.createIndexer(indexer1);
indexersToDelete.add(indexer1.getName());
searchIndexerClient.createIndexer(indexer2);
indexersToDelete.add(indexer2.getName());
Iterator<String> indexersRes = searchIndexerClient.listIndexerNames(Context.NONE)
.iterator();
String actualIndexer = indexersRes.next();
assertNotNull(actualIndexer);
assertEquals(indexer1.getName(), actualIndexer);
actualIndexer = indexersRes.next();
assertNotNull(actualIndexer);
assertEquals(indexer2.getName(), actualIndexer);
assertFalse(indexersRes.hasNext());
} | Iterator<String> indexersRes = searchIndexerClient.listIndexerNames(Context.NONE) | public void canCreateAndListIndexerNames() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName);
mutateName(indexer1, "a" + indexer1.getName());
SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName);
mutateName(indexer2, "b" + indexer2.getName());
searchIndexerClient.createIndexer(indexer1);
indexersToDelete.add(indexer1.getName());
searchIndexerClient.createIndexer(indexer2);
indexersToDelete.add(indexer2.getName());
Iterator<String> indexersRes = searchIndexerClient.listIndexerNames(Context.NONE)
.iterator();
String actualIndexer = indexersRes.next();
assertNotNull(actualIndexer);
assertEquals(indexer1.getName(), actualIndexer);
actualIndexer = indexersRes.next();
assertNotNull(actualIndexer);
assertEquals(indexer2.getName(), actualIndexer);
assertFalse(indexersRes.hasNext());
} | class IndexersManagementSyncTests extends SearchTestBase {
// Base name for the test index that indexers in this suite target.
private static final String TARGET_INDEX_NAME = "indexforindexers";
// Pipeline policy adding a "mock_status=inProgress" query parameter — presumably
// so the test proxy returns a canned "in progress" status payload; confirm
// against CustomQueryPipelinePolicy.
private static final HttpPipelinePolicy MOCK_STATUS_PIPELINE_POLICY =
    new CustomQueryPipelinePolicy("mock_status", "inProgress");
// Names of service resources created by a test; deleted in afterTest().
private final List<String> dataSourcesToDelete = new ArrayList<>();
private final List<String> indexersToDelete = new ArrayList<>();
private final List<String> indexesToDelete = new ArrayList<>();
private final List<String> skillsetsToDelete = new ArrayList<>();
// Synchronous clients rebuilt per test in beforeTest().
private SearchIndexerClient searchIndexerClient;
private SearchIndexClient searchIndexClient;
/**
 * Creates (or updates) a SQL test data source in the service, records it for
 * cleanup in {@code afterTest}, and returns its name.
 */
private String createDataSource() {
    SearchIndexerDataSourceConnection connection = createTestSqlDataSourceObject();
    searchIndexerClient.createOrUpdateDataSourceConnection(connection);
    dataSourcesToDelete.add(connection.getName());
    return connection.getName();
}
/**
 * Creates the live-data-source test index in the service, tracks it for
 * cleanup, and returns the generated index name.
 */
private String createIndex() {
    SearchIndex testIndex = createTestIndexForLiveDatasource();
    searchIndexClient.createIndex(testIndex);
    indexesToDelete.add(testIndex.getName());
    return testIndex.getName();
}
/**
 * Provisions an index, a data source, and an indexer wired to both. The
 * indexer is registered for cleanup and returned to the caller.
 */
private SearchIndexer createTestDataSourceAndIndexer() {
    SearchIndexer newIndexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexer(newIndexer);
    indexersToDelete.add(newIndexer.getName());
    return newIndexer;
}
/**
 * Creates the given indexer in the service, queues it for cleanup, and
 * asserts the service echo matches the local definition (etag ignored).
 *
 * @param indexer the indexer to be created
 */
private void createAndValidateIndexer(SearchIndexer indexer) {
    SearchIndexer created = searchIndexerClient.createIndexer(indexer);
    indexersToDelete.add(created.getName());
    // The service assigns a schedule start time; align the two before comparing.
    setSameStartTime(indexer, created);
    assertObjectEquals(indexer, created, true, "etag");
}
@Override
protected void beforeTest() {
    super.beforeTest();
    // Fresh synchronous clients per test; builders come from SearchTestBase.
    searchIndexerClient = getSearchIndexerClientBuilder().buildClient();
    searchIndexClient = getSearchIndexClientBuilder().buildClient();
}
@Override
protected void afterTest() {
    super.afterTest();
    // Tear down every resource a test registered, in the same order the
    // original loops used: skillsets, data sources, indexers, then indexes.
    skillsetsToDelete.forEach(searchIndexerClient::deleteSkillset);
    dataSourcesToDelete.forEach(searchIndexerClient::deleteDataSourceConnection);
    indexersToDelete.forEach(searchIndexerClient::deleteIndexer);
    indexesToDelete.forEach(searchIndexClient::deleteIndex);
}
@Test
public void createIndexerReturnsCorrectDefinition() {
    // Indexer with explicit batching / failure-tolerance parameters.
    SearchIndexer expectedIndexer = createBaseTestIndexerObject(createIndex(), createDataSource())
        .setIsDisabled(true)
        .setParameters(new IndexingParameters()
            .setBatchSize(50)
            .setMaxFailedItems(10)
            .setMaxFailedItemsPerBatch(10));
    SearchIndexer actualIndexer = searchIndexerClient.createIndexer(expectedIndexer);
    indexersToDelete.add(actualIndexer.getName());
    // NOTE(review): the expected parameters are replaced with an empty
    // configuration before comparison, so the batch/failure limits set above
    // are not verified by the assertion below — confirm this is intentional.
    expectedIndexer.setParameters(new IndexingParameters()
        .setConfiguration(Collections.emptyMap()));
    setSameStartTime(expectedIndexer, actualIndexer);
    assertObjectEquals(expectedIndexer, actualIndexer, true, "etag");
}
@Test
public void canCreateAndListIndexers() {
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    // Prefix the random names with "a"/"b" so the expected listing order of
    // the two indexers is deterministic for the assertions below.
    SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName);
    indexer1.setName("a" + indexer1.getName());
    SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName);
    indexer2.setName("b" + indexer2.getName());
    searchIndexerClient.createIndexer(indexer1);
    indexersToDelete.add(indexer1.getName());
    searchIndexerClient.createIndexer(indexer2);
    indexersToDelete.add(indexer2.getName());
    // Exactly the two created indexers should come back, indexer1 first.
    Iterator<SearchIndexer> indexers = searchIndexerClient.listIndexers().iterator();
    SearchIndexer returnedIndexer = indexers.next();
    assertObjectEquals(indexer1, returnedIndexer, true, "etag");
    returnedIndexer = indexers.next();
    assertObjectEquals(indexer2, returnedIndexer, true, "etag");
    assertFalse(indexers.hasNext());
}
/**
 * Creating an indexer that references a nonexistent data source must fail
 * with a 400 and an actionable error message.
 *
 * Fix: the method carried a duplicated {@code @Test} annotation; JUnit's
 * {@code @Test} is not {@code @Repeatable}, so the duplicate would not
 * compile. A single annotation remains.
 */
@Test
public void createIndexerFailsWithUsefulMessageOnUserError() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), "thisdatasourcedoesnotexist");
    assertHttpResponseException(
        () -> searchIndexerClient.createIndexer(indexer),
        HttpURLConnection.HTTP_BAD_REQUEST,
        "This indexer refers to a data source 'thisdatasourcedoesnotexist' that doesn't exist");
}
@Test
public void canResetIndexerAndGetIndexerStatus() {
    SearchIndexer indexer = createTestDataSourceAndIndexer();
    // After a reset the indexer reports RUNNING overall with a RESET last result.
    searchIndexerClient.resetIndexer(indexer.getName());
    SearchIndexerStatus indexerStatus = searchIndexerClient.getIndexerStatus(indexer.getName());
    assertEquals(IndexerStatus.RUNNING, indexerStatus.getStatus());
    assertEquals(IndexerExecutionStatus.RESET, indexerStatus.getLastResult().getStatus());
}
@Test
public void canResetIndexerAndGetIndexerStatusWithResponse() {
    // Same scenario as above through the WithResponse overloads.
    SearchIndexer indexer = createTestDataSourceAndIndexer();
    searchIndexerClient.resetIndexerWithResponse(indexer.getName(), Context.NONE);
    SearchIndexerStatus indexerStatusResponse = searchIndexerClient.getIndexerStatusWithResponse(indexer.getName(),
        Context.NONE).getValue();
    assertEquals(IndexerStatus.RUNNING, indexerStatusResponse.getStatus());
    assertEquals(IndexerExecutionStatus.RESET, indexerStatusResponse.getLastResult().getStatus());
}
@Test
public void canRunIndexer() {
    SearchIndexer indexer = createTestDataSourceAndIndexer();
    searchIndexerClient.runIndexer(indexer.getName());
    SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
    assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
}
@Test
public void canRunIndexerWithResponse() {
    SearchIndexer indexer = createTestDataSourceAndIndexer();
    // Running an indexer is asynchronous on the service side: expect 202 Accepted.
    Response<Void> response = searchIndexerClient.runIndexerWithResponse(indexer.getName(), Context.NONE);
    SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
    assertEquals(HttpURLConnection.HTTP_ACCEPTED, response.getStatusCode());
    assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
}
@Test
public void canRunIndexerAndGetIndexerStatus() {
    // Rebuild both clients with the mock-status policy so getIndexerStatus
    // returns the canned execution history validated in
    // assertValidSearchIndexerStatus.
    searchIndexerClient = getSearchIndexerClientBuilder(MOCK_STATUS_PIPELINE_POLICY).buildClient();
    searchIndexClient = getSearchIndexClientBuilder(MOCK_STATUS_PIPELINE_POLICY).buildClient();
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexer(indexer);
    indexersToDelete.add(indexer.getName());
    SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
    assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
    Response<Void> indexerRunResponse = searchIndexerClient.runIndexerWithResponse(indexer.getName(),
        Context.NONE);
    assertEquals(HttpResponseStatus.ACCEPTED.code(), indexerRunResponse.getStatusCode());
    indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
    assertValidSearchIndexerStatus(indexerExecutionInfo);
}
@Test
public void canUpdateIndexer() {
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    // Reuse the original name so createOrUpdate performs an update, not a create.
    SearchIndexer updated = createIndexerWithDifferentDescription(indexName, dataSourceName)
        .setName(initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canUpdateIndexerFieldMapping() {
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    // Update only the field mappings of the existing indexer.
    SearchIndexer updated = createIndexerWithDifferentFieldMapping(indexName, dataSourceName)
        .setName(initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithFieldMapping() {
    SearchIndexer indexer = createIndexerWithDifferentFieldMapping(createIndex(), createDataSource());
    createAndValidateIndexer(indexer);
}
@Test
public void canUpdateIndexerDisabled() {
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    // Created disabled; the update flips the disabled flag (see createDisabledIndexer).
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    SearchIndexer updated = createDisabledIndexer(indexName, dataSourceName)
        .setName(initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canUpdateIndexerSchedule() {
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    // Update only the indexing schedule.
    SearchIndexer updated = createIndexerWithDifferentSchedule(indexName, dataSourceName)
        .setName(initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithSchedule() {
    SearchIndexer indexer = createIndexerWithDifferentSchedule(createIndex(), createDataSource());
    createAndValidateIndexer(indexer);
}
@Test
public void canUpdateIndexerBatchSizeMaxFailedItems() {
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    // Update only the indexing parameters (batch size / failure tolerances).
    SearchIndexer updated = createIndexerWithDifferentIndexingParameters(initial);
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithBatchSizeMaxFailedItems() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer updatedExpected = createIndexerWithDifferentIndexingParameters(indexer);
    createAndValidateIndexer(updatedExpected);
}
@Test
public void canUpdateIndexerBlobParams() {
    String indexName = createIndex();
    // These tests use a blob data source rather than the SQL one.
    String dataSourceName = searchIndexerClient.createDataSourceConnection(createBlobDataSource()).getName();
    dataSourcesToDelete.add(dataSourceName);
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    // Update only the blob-storage configuration parameters.
    SearchIndexer updated = createIndexerWithStorageConfig(indexName, dataSourceName)
        .setName(initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithBlobParams() {
    SearchIndexerDataSourceConnection blobDataSource = createBlobDataSource();
    SearchIndexerDataSourceConnection dataSource = searchIndexerClient.createOrUpdateDataSourceConnection(blobDataSource);
    dataSourcesToDelete.add(dataSource.getName());
    SearchIndexer indexer = createIndexerWithStorageConfig(createIndex(), dataSource.getName());
    createAndValidateIndexer(indexer);
}
@Test
public void canCreateAndDeleteIndexer() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexer(indexer);
    searchIndexerClient.deleteIndexer(indexer.getName());
    // Fetching a deleted indexer must fail.
    assertThrows(HttpResponseException.class, () -> searchIndexerClient.getIndexer(indexer.getName()));
}
@Test
public void canCreateAndDeleteIndexerWithResponse() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexerWithResponse(indexer, Context.NONE);
    searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
    assertThrows(HttpResponseException.class, () -> searchIndexerClient.getIndexer(indexer.getName()));
}
@Test
public void deleteIndexerIsIdempotent() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    // Deleting a nonexistent indexer yields 404, not an exception.
    Response<Void> result = searchIndexerClient.deleteIndexerWithResponse(indexer, false,
        Context.NONE);
    assertEquals(HttpURLConnection.HTTP_NOT_FOUND, result.getStatusCode());
    searchIndexerClient.createIndexer(indexer);
    // First delete succeeds (204); a second delete reports 404 again.
    result = searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, result.getStatusCode());
    result = searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
    assertEquals(HttpURLConnection.HTTP_NOT_FOUND, result.getStatusCode());
}
@Test
public void canCreateAndGetIndexer() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexer(indexer);
    indexersToDelete.add(indexer.getName());
    // Both the simple and the WithResponse getters must return the definition.
    SearchIndexer indexerResult = searchIndexerClient.getIndexer(indexer.getName());
    assertObjectEquals(indexer, indexerResult, true, "etag");
    indexerResult = searchIndexerClient.getIndexerWithResponse(indexer.getName(), Context.NONE)
        .getValue();
    assertObjectEquals(indexer, indexerResult, true, "etag");
}
@Test
public void getIndexerThrowsOnNotFound() {
    assertHttpResponseException(
        () -> searchIndexerClient.getIndexer("thisindexerdoesnotexist"),
        HttpURLConnection.HTTP_NOT_FOUND,
        "Indexer 'thisindexerdoesnotexist' was not found");
}
@Test
public void createOrUpdateIndexerIfNotExistsSucceedsOnNoResource() {
    // onlyIfUnchanged=true with no prior resource still creates it and
    // returns a populated etag.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer created = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, true, Context.NONE)
        .getValue();
    indexersToDelete.add(created.getName());
    assertFalse(CoreUtils.isNullOrEmpty(created.getETag()));
}
@Test
public void deleteIndexerIfExistsWorksOnlyWhenResourceExists() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer created = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    // First conditional delete succeeds; the second hits a missing resource
    // and must fail with 412 Precondition Failed.
    searchIndexerClient.deleteIndexerWithResponse(created, true, Context.NONE);
    try {
        searchIndexerClient.deleteIndexerWithResponse(created, true, Context.NONE);
        fail("deleteFunc should have failed due to non existent resource.");
    } catch (HttpResponseException ex) {
        assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
    }
}
@Test
public void deleteIndexerIfNotChangedWorksOnlyOnCurrentResource() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    // "stale" carries the etag from the first write; the second write bumps it.
    SearchIndexer stale = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, true, Context.NONE)
        .getValue();
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(stale, false, Context.NONE)
        .getValue();
    try {
        // Deleting with the stale etag must fail with 412.
        searchIndexerClient.deleteIndexerWithResponse(stale, true, Context.NONE);
        fail("deleteFunc should have failed due to precondition.");
    } catch (HttpResponseException ex) {
        assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
    }
    // Deleting with the current etag succeeds.
    searchIndexerClient.deleteIndexerWithResponse(updated, true, Context.NONE);
}
@Test
public void updateIndexerIfExistsSucceedsOnExistingResource() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    String originalETag = original.getETag();
    indexersToDelete.add(original.getName());
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
        false, Context.NONE)
        .getValue();
    // A successful update must produce a new, non-empty etag.
    String updatedETag = updated.getETag();
    assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
    assertNotEquals(originalETag, updatedETag);
}
@Test
public void updateIndexerIfNotChangedFailsWhenResourceChanged() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    String originalETag = original.getETag();
    indexersToDelete.add(original.getName());
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
        true, Context.NONE)
        .getValue();
    String updatedETag = updated.getETag();
    try {
        // "original" now carries an outdated etag; a conditional update must 412.
        searchIndexerClient.createOrUpdateIndexerWithResponse(original, true, Context.NONE);
        fail("createOrUpdateDefinition should have failed due to precondition.");
    } catch (HttpResponseException ex) {
        assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
    }
    assertFalse(CoreUtils.isNullOrEmpty(originalETag));
    assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
    assertNotEquals(originalETag, updatedETag);
}
@Test
public void updateIndexerIfNotChangedSucceedsWhenResourceUnchanged() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    String originalETag = original.getETag();
    indexersToDelete.add(original.getName());
    // The conditional update is issued with the current etag, so it succeeds
    // and produces a fresh etag.
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
        true, Context.NONE)
        .getValue();
    String updatedETag = updated.getETag();
    assertFalse(CoreUtils.isNullOrEmpty(originalETag));
    assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
    assertNotEquals(originalETag, updatedETag);
}
@Test
public void canUpdateIndexerSkillset() {
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    // Create the skillset first, then attach it to the existing indexer.
    SearchIndexerSkillset skillset = createSkillsetObject();
    searchIndexerClient.createSkillset(skillset);
    skillsetsToDelete.add(skillset.getName());
    SearchIndexer updated = createIndexerWithDifferentSkillset(indexName, dataSourceName, skillset.getName())
        .setName(initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithSkillset() {
    SearchIndexerSkillset skillset = searchIndexerClient.createSkillset(createSkillsetObject());
    skillsetsToDelete.add(skillset.getName());
    SearchIndexer indexer = createIndexerWithDifferentSkillset(createIndex(), createDataSource(), skillset.getName());
    createAndValidateIndexer(indexer);
}
/**
 * Builds a minimal OCR skillset: a single OCR skill that reads the document
 * URL and query string and emits recognized text under the name "mytext".
 *
 * @return the newly created skillset object
 */
SearchIndexerSkillset createSkillsetObject() {
    InputFieldMappingEntry urlInput = new InputFieldMappingEntry()
        .setName("url")
        .setSource("/document/url");
    InputFieldMappingEntry queryStringInput = new InputFieldMappingEntry()
        .setName("queryString")
        .setSource("/document/queryString");
    OutputFieldMappingEntry textOutput = new OutputFieldMappingEntry()
        .setName("text")
        .setTargetName("mytext");
    SearchIndexerSkill ocrSkill = new OcrSkill()
        .setShouldDetectOrientation(true)
        .setName("myocr")
        .setDescription("Tested OCR skill")
        .setContext("/document")
        .setInputs(Arrays.asList(urlInput, queryStringInput))
        .setOutputs(Collections.singletonList(textOutput));
    return new SearchIndexerSkillset()
        .setName(testResourceNamer.randomName("ocr-skillset", 32))
        .setDescription("Skillset for testing default configuration")
        .setSkills(Collections.singletonList(ocrSkill));
}
/**
 * Builds the baseline indexer definition used throughout the suite: a random
 * name, the given target index and data source, and a daily schedule.
 */
SearchIndexer createBaseTestIndexerObject(String targetIndexName, String dataSourceName) {
    IndexingSchedule dailySchedule = new IndexingSchedule().setInterval(Duration.ofDays(1));
    return new SearchIndexer()
        .setName(testResourceNamer.randomName("indexer", 32))
        .setTargetIndexName(targetIndexName)
        .setDataSourceName(dataSourceName)
        .setSchedule(dailySchedule);
}
/**
 * This index contains fields that are declared on the live data source we use to test the indexers:
 * a filterable county name, a searchable state, and the feature id as key.
 *
 * @return the newly created Index object
 */
SearchIndex createTestIndexForLiveDatasource() {
    return new SearchIndex()
        .setName(testResourceNamer.randomName(IndexersManagementSyncTests.TARGET_INDEX_NAME, 32))
        .setFields(Arrays.asList(
            new SearchField()
                .setName("county_name")
                .setType(SearchFieldDataType.STRING)
                .setSearchable(Boolean.FALSE)
                .setFilterable(Boolean.TRUE),
            new SearchField()
                .setName("state")
                .setType(SearchFieldDataType.STRING)
                .setSearchable(Boolean.TRUE)
                .setFilterable(Boolean.TRUE),
            // Key field: every search index requires exactly one.
            new SearchField()
                .setName("feature_id")
                .setType(SearchFieldDataType.STRING)
                .setKey(Boolean.TRUE)
                .setSearchable(Boolean.TRUE)
                .setFilterable(Boolean.FALSE)));
}
/**
 * Builds a baseline indexer whose description differs from the default.
 *
 * @return the built (not yet created) indexer
 */
SearchIndexer createIndexerWithDifferentDescription(String targetIndexName, String dataSourceName) {
    return createBaseTestIndexerObject(targetIndexName, dataSourceName)
        .setDescription("somethingdifferent");
}
/**
 * Builds a baseline indexer with a field mapping from the source column
 * "state_alpha" to the index field "state".
 *
 * @return the built (not yet created) indexer
 */
SearchIndexer createIndexerWithDifferentFieldMapping(String targetIndexName, String dataSourceName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    List<FieldMapping> fieldMappings = Collections.singletonList(new FieldMapping()
        .setSourceFieldName("state_alpha")
        .setTargetFieldName("state"));
    indexer.setFieldMappings(fieldMappings);
    return indexer;
}
/**
 * Builds a baseline indexer with an explicit Disabled setting.
 * NOTE(review): despite the name, this sets isDisabled to {@code false}; its
 * caller (canUpdateIndexerDisabled) uses it to flip an indexer that was
 * created with isDisabled=true. The previous javadoc claimed "true".
 *
 * @return the built (not yet created) indexer
 */
SearchIndexer createDisabledIndexer(String targetIndexName, String dataSourceName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    indexer.setIsDisabled(false);
    return indexer;
}
/**
 * Builds a baseline indexer with a 10-minute schedule instead of the
 * default daily one.
 *
 * @return the built (not yet created) indexer
 */
SearchIndexer createIndexerWithDifferentSchedule(String targetIndexName, String dataSourceName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    IndexingSchedule is = new IndexingSchedule()
        .setInterval(Duration.ofMinutes(10));
    indexer.setSchedule(is);
    return indexer;
}
/**
 * Builds a baseline indexer attached to the given skillset.
 *
 * @return the built (not yet created) indexer
 */
SearchIndexer createIndexerWithDifferentSkillset(String targetIndexName, String dataSourceName, String skillsetName) {
    return createBaseTestIndexerObject(targetIndexName, dataSourceName)
        .setSkillsetName(skillsetName);
}
/**
 * Replaces the given indexer's indexing parameters with non-default batch
 * size and failure tolerances. Mutates and returns the same instance.
 *
 * @return the same indexer instance, with updated parameters
 */
SearchIndexer createIndexerWithDifferentIndexingParameters(SearchIndexer indexer) {
    IndexingParameters ip = new IndexingParameters()
        .setMaxFailedItems(121)
        .setMaxFailedItemsPerBatch(11)
        .setBatchSize(20);
    indexer.setParameters(ip);
    return indexer;
}
/**
 * Builds a baseline indexer carrying blob-storage extraction configuration
 * (file extension filters, metadata-only extraction, lenient content types).
 */
SearchIndexer createIndexerWithStorageConfig(String targetIndexName, String dataSourceName) {
    SearchIndexer updatedExpected = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    HashMap<String, Object> config = new HashMap<>();
    config.put("indexedFileNameExtensions", ".pdf,.docx");
    config.put("excludedFileNameExtensions", ".xlsx");
    config.put("dataToExtract", "storageMetadata");
    config.put("failOnUnsupportedContentType", false);
    IndexingParameters ip = new IndexingParameters()
        .setConfiguration(config);
    updatedExpected.setParameters(ip);
    return updatedExpected;
}
// The service populates the schedule start time; copy the actual value onto
// the expected indexer so object comparison ignores that server-set field.
void setSameStartTime(SearchIndexer expected, SearchIndexer actual) {
    expected.getSchedule().setStartTime(actual.getSchedule().getStartTime());
}
// Asserts an execution result has non-null start/end times that differ from
// "now" — i.e. they came from the recorded payload, not the local clock.
void assertStartAndEndTimeValid(IndexerExecutionResult result) {
    assertNotNull(result.getStartTime());
    assertNotEquals(OffsetDateTime.now(), result.getStartTime());
    assertNotNull(result.getEndTime());
    assertNotEquals(OffsetDateTime.now(), result.getEndTime());
}
// Validates the canned status payload returned when the mock-status pipeline
// policy is active: overall IN_PROGRESS result, extraction limits, and a
// three-entry execution history (newest = transient failure, middle = reset,
// oldest = success with detailed errors and warnings).
//
// NOTE(review): several string literals below ("https:" on the
// documentation-link assertions and "Enrichment.LanguageDetectionSkill." on
// the warning-name assertion) appear truncated — likely an extraction
// artifact that dropped everything after "//". Restore the full literals
// before compiling; as written these lines are not valid Java.
void assertValidSearchIndexerStatus(SearchIndexerStatus indexerExecutionInfo) {
    assertEquals(IndexerExecutionStatus.IN_PROGRESS, indexerExecutionInfo.getLastResult().getStatus());
    assertEquals(3, indexerExecutionInfo.getExecutionHistory().size());
    SearchIndexerLimits limits = indexerExecutionInfo.getLimits();
    assertNotNull(limits);
    assertEquals(100000, limits.getMaxDocumentContentCharactersToExtract(), 0);
    assertEquals(1000, limits.getMaxDocumentExtractionSize(), 0);
    IndexerExecutionResult newestResult = indexerExecutionInfo.getExecutionHistory().get(0);
    IndexerExecutionResult middleResult = indexerExecutionInfo.getExecutionHistory().get(1);
    IndexerExecutionResult oldestResult = indexerExecutionInfo.getExecutionHistory().get(2);
    assertEquals(IndexerExecutionStatus.TRANSIENT_FAILURE, newestResult.getStatus());
    assertEquals("The indexer could not connect to the data source",
        newestResult.getErrorMessage());
    assertStartAndEndTimeValid(newestResult);
    assertEquals(IndexerExecutionStatus.RESET, middleResult.getStatus());
    assertStartAndEndTimeValid(middleResult);
    assertEquals(IndexerExecutionStatus.SUCCESS, oldestResult.getStatus());
    assertEquals(124876, oldestResult.getItemCount());
    assertEquals(2, oldestResult.getFailedItemCount());
    assertEquals("100", oldestResult.getInitialTrackingState());
    assertEquals("200", oldestResult.getFinalTrackingState());
    assertStartAndEndTimeValid(oldestResult);
    assertEquals(2, oldestResult.getErrors().size());
    assertEquals("1", oldestResult.getErrors().get(0).getKey());
    assertEquals("Key field contains unsafe characters",
        oldestResult.getErrors().get(0).getErrorMessage());
    assertEquals("DocumentExtraction.AzureBlob.MyDataSource",
        oldestResult.getErrors().get(0).getName());
    assertEquals("The file could not be parsed.", oldestResult.getErrors().get(0).getDetails());
    // NOTE(review): truncated literal — original was a full https:// link.
    assertEquals("https:
    oldestResult.getErrors().get(0).getDocumentationLink());
    assertEquals("121713", oldestResult.getErrors().get(1).getKey());
    assertEquals("Item is too large", oldestResult.getErrors().get(1).getErrorMessage());
    assertEquals("DocumentExtraction.AzureBlob.DataReader",
        oldestResult.getErrors().get(1).getName());
    assertEquals("Blob size cannot exceed 256 MB.", oldestResult.getErrors().get(1).getDetails());
    // NOTE(review): truncated literal — original was a full https:// link.
    assertEquals("https:
    oldestResult.getErrors().get(1).getDocumentationLink());
    assertEquals(1, oldestResult.getWarnings().size());
    assertEquals("2", oldestResult.getWarnings().get(0).getKey());
    assertEquals("Document was truncated to 50000 characters.",
        oldestResult.getWarnings().get(0).getMessage());
    // NOTE(review): truncated literal — skill name is cut off mid-string.
    assertEquals("Enrichment.LanguageDetectionSkill.
    oldestResult.getWarnings().get(0).getName());
    assertEquals("Try to split the input into smaller chunks using Split skill.",
        oldestResult.getWarnings().get(0).getDetails());
    // NOTE(review): truncated literal — original was a full https:// link.
    assertEquals("https:
    oldestResult.getWarnings().get(0).getDocumentationLink());
}
} | class IndexersManagementSyncTests extends SearchTestBase {
private static final String TARGET_INDEX_NAME = "indexforindexers";
private static final HttpPipelinePolicy MOCK_STATUS_PIPELINE_POLICY =
new CustomQueryPipelinePolicy("mock_status", "inProgress");
private final List<String> dataSourcesToDelete = new ArrayList<>();
private final List<String> indexersToDelete = new ArrayList<>();
private final List<String> indexesToDelete = new ArrayList<>();
private final List<String> skillsetsToDelete = new ArrayList<>();
private SearchIndexerClient searchIndexerClient;
private SearchIndexClient searchIndexClient;
private String createDataSource() {
SearchIndexerDataSourceConnection dataSource = createTestSqlDataSourceObject();
searchIndexerClient.createOrUpdateDataSourceConnection(dataSource);
dataSourcesToDelete.add(dataSource.getName());
return dataSource.getName();
}
private String createIndex() {
SearchIndex index = createTestIndexForLiveDatasource();
searchIndexClient.createIndex(index);
indexesToDelete.add(index.getName());
return index.getName();
}
private SearchIndexer createTestDataSourceAndIndexer() {
SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexer.getName());
return indexer;
}
/**
* Creates the index and indexer in the search service and then retrieves the indexer and validates it
*
* @param indexer the indexer to be created
*/
private void createAndValidateIndexer(SearchIndexer indexer) {
SearchIndexer indexerResponse = searchIndexerClient.createIndexer(indexer);
indexersToDelete.add(indexerResponse.getName());
setSameStartTime(indexer, indexerResponse);
assertObjectEquals(indexer, indexerResponse, true, "etag");
}
@Override
protected void beforeTest() {
super.beforeTest();
searchIndexerClient = getSearchIndexerClientBuilder().buildClient();
searchIndexClient = getSearchIndexClientBuilder().buildClient();
}
@Override
protected void afterTest() {
super.afterTest();
for (String skillset : skillsetsToDelete) {
searchIndexerClient.deleteSkillset(skillset);
}
for (String dataSource : dataSourcesToDelete) {
searchIndexerClient.deleteDataSourceConnection(dataSource);
}
for (String indexer : indexersToDelete) {
searchIndexerClient.deleteIndexer(indexer);
}
for (String index : indexesToDelete) {
searchIndexClient.deleteIndex(index);
}
}
@Test
public void createIndexerReturnsCorrectDefinition() {
SearchIndexer expectedIndexer = createBaseTestIndexerObject(createIndex(), createDataSource())
.setIsDisabled(true)
.setParameters(new IndexingParameters()
.setBatchSize(50)
.setMaxFailedItems(10)
.setMaxFailedItemsPerBatch(10));
SearchIndexer actualIndexer = searchIndexerClient.createIndexer(expectedIndexer);
indexersToDelete.add(actualIndexer.getName());
expectedIndexer.setParameters(new IndexingParameters()
.setConfiguration(Collections.emptyMap()));
setSameStartTime(expectedIndexer, actualIndexer);
assertObjectEquals(expectedIndexer, actualIndexer, true, "etag");
}
@Test
public void canCreateAndListIndexers() {
String indexName = createIndex();
String dataSourceName = createDataSource();
SearchIndexer indexer1 = createBaseTestIndexerObject(indexName, dataSourceName);
mutateName(indexer1, "a" + indexer1.getName());
SearchIndexer indexer2 = createBaseTestIndexerObject(indexName, dataSourceName);
mutateName(indexer2, "b" + indexer2.getName());
searchIndexerClient.createIndexer(indexer1);
indexersToDelete.add(indexer1.getName());
searchIndexerClient.createIndexer(indexer2);
indexersToDelete.add(indexer2.getName());
Iterator<SearchIndexer> indexers = searchIndexerClient.listIndexers().iterator();
SearchIndexer returnedIndexer = indexers.next();
assertObjectEquals(indexer1, returnedIndexer, true, "etag");
returnedIndexer = indexers.next();
assertObjectEquals(indexer2, returnedIndexer, true, "etag");
assertFalse(indexers.hasNext());
}
/**
 * Creating an indexer that references a nonexistent data source must fail
 * with a 400 and an actionable error message.
 *
 * Fix: the method carried a duplicated {@code @Test} annotation; JUnit's
 * {@code @Test} is not {@code @Repeatable}, so the duplicate would not
 * compile. A single annotation remains.
 */
@Test
public void createIndexerFailsWithUsefulMessageOnUserError() {
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), "thisdatasourcedoesnotexist");
    assertHttpResponseException(
        () -> searchIndexerClient.createIndexer(indexer),
        HttpURLConnection.HTTP_BAD_REQUEST,
        "This indexer refers to a data source 'thisdatasourcedoesnotexist' that doesn't exist");
}
@Test
public void canResetIndexerAndGetIndexerStatus() {
SearchIndexer indexer = createTestDataSourceAndIndexer();
searchIndexerClient.resetIndexer(indexer.getName());
SearchIndexerStatus indexerStatus = searchIndexerClient.getIndexerStatus(indexer.getName());
assertEquals(IndexerStatus.RUNNING, indexerStatus.getStatus());
assertEquals(IndexerExecutionStatus.RESET, indexerStatus.getLastResult().getStatus());
}
@Test
public void canResetIndexerAndGetIndexerStatusWithResponse() {
    // Same as canResetIndexerAndGetIndexerStatus but exercising the *WithResponse overloads.
    SearchIndexer indexer = createTestDataSourceAndIndexer();
    searchIndexerClient.resetIndexerWithResponse(indexer.getName(), Context.NONE);
    SearchIndexerStatus indexerStatusResponse = searchIndexerClient.getIndexerStatusWithResponse(indexer.getName(),
        Context.NONE).getValue();
    assertEquals(IndexerStatus.RUNNING, indexerStatusResponse.getStatus());
    assertEquals(IndexerExecutionStatus.RESET, indexerStatusResponse.getLastResult().getStatus());
}
@Test
public void canRunIndexer() {
    // Kick off an on-demand run and confirm the indexer reports itself as running.
    SearchIndexer created = createTestDataSourceAndIndexer();
    searchIndexerClient.runIndexer(created.getName());
    SearchIndexerStatus status = searchIndexerClient.getIndexerStatus(created.getName());
    assertEquals(IndexerStatus.RUNNING, status.getStatus());
}
@Test
public void canRunIndexerWithResponse() {
    // Running an indexer on demand returns 202 Accepted and leaves the indexer RUNNING.
    SearchIndexer indexer = createTestDataSourceAndIndexer();
    Response<Void> response = searchIndexerClient.runIndexerWithResponse(indexer.getName(), Context.NONE);
    SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
    assertEquals(HttpURLConnection.HTTP_ACCEPTED, response.getStatusCode());
    assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
}
@Test
public void canRunIndexerAndGetIndexerStatus() {
    // Rebuild the clients with MOCK_STATUS_PIPELINE_POLICY installed — presumably a
    // pipeline policy that returns canned status payloads (see
    // assertValidSearchIndexerStatus for the expected shape) — TODO confirm.
    searchIndexerClient = getSearchIndexerClientBuilder(MOCK_STATUS_PIPELINE_POLICY).buildClient();
    searchIndexClient = getSearchIndexClientBuilder().addPolicy(MOCK_STATUS_PIPELINE_POLICY).buildClient();
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexer(indexer);
    indexersToDelete.add(indexer.getName());
    SearchIndexerStatus indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
    assertEquals(IndexerStatus.RUNNING, indexerExecutionInfo.getStatus());
    Response<Void> indexerRunResponse = searchIndexerClient.runIndexerWithResponse(indexer.getName(),
        Context.NONE);
    assertEquals(HttpResponseStatus.ACCEPTED.code(), indexerRunResponse.getStatusCode());
    // After the run, the mocked status must match the canned execution history in detail.
    indexerExecutionInfo = searchIndexerClient.getIndexerStatus(indexer.getName());
    assertValidSearchIndexerStatus(indexerExecutionInfo);
}
@Test
public void canUpdateIndexer() {
    // createOrUpdate on an existing (disabled) indexer replaces its definition with one
    // that has a different description.
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    SearchIndexer updated = createIndexerWithDifferentDescription(indexName, dataSourceName);
    // Reuse the initial indexer's name so createOrUpdate targets the same resource.
    mutateName(updated, initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    // The service stamps the schedule start time; align it before comparing.
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canUpdateIndexerFieldMapping() {
    // createOrUpdate can change an existing indexer's field mappings.
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    SearchIndexer updated = createIndexerWithDifferentFieldMapping(indexName, dataSourceName);
    // Reuse the initial indexer's name so createOrUpdate targets the same resource.
    mutateName(updated, initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    // The service stamps the schedule start time; align it before comparing.
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithFieldMapping() {
    // A brand-new indexer carrying field mappings round-trips through create.
    SearchIndexer indexer = createIndexerWithDifferentFieldMapping(createIndex(), createDataSource());
    createAndValidateIndexer(indexer);
}
@Test
public void canUpdateIndexerDisabled() {
    // createOrUpdate can flip an indexer's disabled flag (initial is disabled;
    // createDisabledIndexer produces one with isDisabled=false).
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    SearchIndexer updated = createDisabledIndexer(indexName, dataSourceName);
    // Reuse the initial indexer's name so createOrUpdate targets the same resource.
    mutateName(updated, initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    // The service stamps the schedule start time; align it before comparing.
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canUpdateIndexerSchedule() {
    // createOrUpdate can change an existing indexer's schedule (daily -> every 10 minutes).
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    SearchIndexer updated = createIndexerWithDifferentSchedule(indexName, dataSourceName);
    // Reuse the initial indexer's name so createOrUpdate targets the same resource.
    mutateName(updated, initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    // The service stamps the schedule start time; align it before comparing.
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithSchedule() {
    // A brand-new indexer with a non-default schedule round-trips through create.
    SearchIndexer indexer = createIndexerWithDifferentSchedule(createIndex(), createDataSource());
    createAndValidateIndexer(indexer);
}
@Test
public void canUpdateIndexerBatchSizeMaxFailedItems() {
    // createOrUpdate can change batch size / failure tolerances. No mutateName here:
    // createIndexerWithDifferentIndexingParameters mutates the *same* instance, so the
    // name already matches the created indexer.
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    SearchIndexer updated = createIndexerWithDifferentIndexingParameters(initial);
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    // The service stamps the schedule start time; align it before comparing.
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithBatchSizeMaxFailedItems() {
    // A brand-new indexer with custom indexing parameters round-trips through create.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer updatedExpected = createIndexerWithDifferentIndexingParameters(indexer);
    createAndValidateIndexer(updatedExpected);
}
@Test
public void canUpdateIndexerBlobParams() {
    // createOrUpdate can attach blob-storage-specific configuration to an indexer
    // backed by a blob data source.
    String indexName = createIndex();
    String dataSourceName = searchIndexerClient.createDataSourceConnection(createBlobDataSource()).getName();
    dataSourcesToDelete.add(dataSourceName);
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    SearchIndexer updated = createIndexerWithStorageConfig(indexName, dataSourceName);
    // Reuse the initial indexer's name so createOrUpdate targets the same resource.
    mutateName(updated, initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    // The service stamps the schedule start time; align it before comparing.
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithBlobParams() {
    // A brand-new indexer with blob-storage configuration round-trips through create.
    SearchIndexerDataSourceConnection blobDataSource = createBlobDataSource();
    SearchIndexerDataSourceConnection dataSource = searchIndexerClient.createOrUpdateDataSourceConnection(blobDataSource);
    dataSourcesToDelete.add(dataSource.getName());
    SearchIndexer indexer = createIndexerWithStorageConfig(createIndex(), dataSource.getName());
    createAndValidateIndexer(indexer);
}
@Test
public void canCreateAndDeleteIndexer() {
    // After deletion, fetching the indexer must fail (service returns an error response).
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexer(indexer);
    searchIndexerClient.deleteIndexer(indexer.getName());
    assertThrows(HttpResponseException.class, () -> searchIndexerClient.getIndexer(indexer.getName()));
}
@Test
public void canCreateAndDeleteIndexerWithResponse() {
    // Same as canCreateAndDeleteIndexer but via the *WithResponse overloads
    // (onlyIfUnchanged = false on delete).
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexerWithResponse(indexer, Context.NONE);
    searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
    assertThrows(HttpResponseException.class, () -> searchIndexerClient.getIndexer(indexer.getName()));
}
@Test
public void deleteIndexerIsIdempotent() {
    // Deleting a non-existent indexer yields 404; deleting an existing one yields 204;
    // a repeat delete yields 404 again — i.e. delete never throws for a missing resource.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    Response<Void> result = searchIndexerClient.deleteIndexerWithResponse(indexer, false,
        Context.NONE);
    assertEquals(HttpURLConnection.HTTP_NOT_FOUND, result.getStatusCode());
    searchIndexerClient.createIndexer(indexer);
    result = searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, result.getStatusCode());
    result = searchIndexerClient.deleteIndexerWithResponse(indexer, false, Context.NONE);
    assertEquals(HttpURLConnection.HTTP_NOT_FOUND, result.getStatusCode());
}
@Test
public void canCreateAndGetIndexer() {
    // A created indexer can be fetched back, both via getIndexer and
    // getIndexerWithResponse, and matches the definition that was sent.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    searchIndexerClient.createIndexer(indexer);
    indexersToDelete.add(indexer.getName());
    SearchIndexer indexerResult = searchIndexerClient.getIndexer(indexer.getName());
    assertObjectEquals(indexer, indexerResult, true, "etag");
    indexerResult = searchIndexerClient.getIndexerWithResponse(indexer.getName(), Context.NONE)
        .getValue();
    assertObjectEquals(indexer, indexerResult, true, "etag");
}
@Test
public void getIndexerThrowsOnNotFound() {
    // Fetching a non-existent indexer surfaces a 404 with a descriptive message.
    assertHttpResponseException(
        () -> searchIndexerClient.getIndexer("thisindexerdoesnotexist"),
        HttpURLConnection.HTTP_NOT_FOUND,
        "Indexer 'thisindexerdoesnotexist' was not found");
}
@Test
public void createOrUpdateIndexerIfNotExistsSucceedsOnNoResource() {
    // With onlyIfUnchanged = true on a resource that does not exist yet, createOrUpdate
    // behaves as a create and returns an entity bearing an ETag.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer created = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, true, Context.NONE)
        .getValue();
    indexersToDelete.add(created.getName());
    assertFalse(CoreUtils.isNullOrEmpty(created.getETag()));
}
@Test
public void deleteIndexerIfExistsWorksOnlyWhenResourceExists() {
    // A conditional delete (onlyIfUnchanged = true) succeeds for an existing resource,
    // but a second conditional delete must fail with 412 Precondition Failed.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer created = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    searchIndexerClient.deleteIndexerWithResponse(created, true, Context.NONE);
    // Idiom: assertThrows (already used elsewhere in this file) instead of try/fail/catch.
    HttpResponseException ex = assertThrows(HttpResponseException.class,
        () -> searchIndexerClient.deleteIndexerWithResponse(created, true, Context.NONE));
    assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
}
@Test
public void deleteIndexerIfNotChangedWorksOnlyOnCurrentResource() {
    // A conditional delete using a *stale* ETag must fail with 412; the same delete
    // using the current ETag succeeds.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer stale = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, true, Context.NONE)
        .getValue();
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(stale, false, Context.NONE)
        .getValue();
    // Idiom: assertThrows (already used elsewhere in this file) instead of try/fail/catch.
    HttpResponseException ex = assertThrows(HttpResponseException.class,
        () -> searchIndexerClient.deleteIndexerWithResponse(stale, true, Context.NONE));
    assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
    searchIndexerClient.deleteIndexerWithResponse(updated, true, Context.NONE);
}
@Test
public void updateIndexerIfExistsSucceedsOnExistingResource() {
    // An unconditional update of an existing indexer succeeds and produces a new ETag.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    String originalETag = original.getETag();
    indexersToDelete.add(original.getName());
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
        false, Context.NONE)
        .getValue();
    String updatedETag = updated.getETag();
    assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
    assertNotEquals(originalETag, updatedETag);
}
@Test
public void updateIndexerIfNotChangedFailsWhenResourceChanged() {
    // After an ETag-gated update succeeds (and changes the ETag), replaying the update
    // with the now-stale entity must fail with 412 Precondition Failed.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    String originalETag = original.getETag();
    indexersToDelete.add(original.getName());
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
        true, Context.NONE)
        .getValue();
    String updatedETag = updated.getETag();
    // Idiom: assertThrows (already used elsewhere in this file) instead of try/fail/catch.
    HttpResponseException ex = assertThrows(HttpResponseException.class,
        () -> searchIndexerClient.createOrUpdateIndexerWithResponse(original, true, Context.NONE));
    assertEquals(HttpURLConnection.HTTP_PRECON_FAILED, ex.getResponse().getStatusCode());
    assertFalse(CoreUtils.isNullOrEmpty(originalETag));
    assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
    assertNotEquals(originalETag, updatedETag);
}
@Test
public void updateIndexerIfNotChangedSucceedsWhenResourceUnchanged() {
    // An ETag-gated update against the current entity succeeds and yields a fresh ETag.
    SearchIndexer indexer = createBaseTestIndexerObject(createIndex(), createDataSource());
    SearchIndexer original = searchIndexerClient.createOrUpdateIndexerWithResponse(indexer, false, Context.NONE)
        .getValue();
    String originalETag = original.getETag();
    indexersToDelete.add(original.getName());
    SearchIndexer updated = searchIndexerClient.createOrUpdateIndexerWithResponse(original.setDescription("ABrandNewDescription"),
        true, Context.NONE)
        .getValue();
    String updatedETag = updated.getETag();
    assertFalse(CoreUtils.isNullOrEmpty(originalETag));
    assertFalse(CoreUtils.isNullOrEmpty(updatedETag));
    assertNotEquals(originalETag, updatedETag);
}
@Test
public void canUpdateIndexerSkillset() {
    // createOrUpdate can attach a (freshly created) skillset to an existing indexer.
    String indexName = createIndex();
    String dataSourceName = createDataSource();
    SearchIndexer initial = createBaseTestIndexerObject(indexName, dataSourceName).setIsDisabled(true);
    searchIndexerClient.createIndexer(initial);
    indexersToDelete.add(initial.getName());
    SearchIndexerSkillset skillset = createSkillsetObject();
    searchIndexerClient.createSkillset(skillset);
    skillsetsToDelete.add(skillset.getName());
    SearchIndexer updated = createIndexerWithDifferentSkillset(indexName, dataSourceName, skillset.getName());
    // Reuse the initial indexer's name so createOrUpdate targets the same resource.
    mutateName(updated, initial.getName());
    SearchIndexer indexerResponse = searchIndexerClient.createOrUpdateIndexer(updated);
    // The service stamps the schedule start time; align it before comparing.
    setSameStartTime(updated, indexerResponse);
    assertObjectEquals(updated, indexerResponse, true, "etag");
}
@Test
public void canCreateIndexerWithSkillset() {
    // A brand-new indexer referencing an existing skillset round-trips through create.
    SearchIndexerSkillset skillset = searchIndexerClient.createSkillset(createSkillsetObject());
    skillsetsToDelete.add(skillset.getName());
    SearchIndexer indexer = createIndexerWithDifferentSkillset(createIndex(), createDataSource(), skillset.getName());
    createAndValidateIndexer(indexer);
}
/**
 * Overwrites the indexer's read-only {@code name} via reflection so tests can reuse the
 * base indexer factory while controlling the identity used for list/update operations.
 * NOTE(review): relies on {@code SearchIndexer} declaring a private field literally
 * named "name"; a model rename would break this helper at runtime — confirm.
 */
void mutateName(SearchIndexer updateIndexer, String indexerName) {
    try {
        Field updateField = updateIndexer.getClass().getDeclaredField("name");
        updateField.setAccessible(true);
        updateField.set(updateIndexer, indexerName);
    } catch (Exception e) {
        // Rethrow reflection failures unchecked (Reactor's Exceptions.propagate).
        throw Exceptions.propagate(e);
    }
}
/**
 * Builds a valid skillset containing a single OCR skill that reads the document's
 * url/queryString inputs and writes a "mytext" output.
 *
 * @return a new skillset definition with a randomized name
 */
SearchIndexerSkillset createSkillsetObject() {
    InputFieldMappingEntry urlInput = new InputFieldMappingEntry("url")
        .setSource("/document/url");
    InputFieldMappingEntry queryInput = new InputFieldMappingEntry("queryString")
        .setSource("/document/queryString");
    OutputFieldMappingEntry textOutput = new OutputFieldMappingEntry("text")
        .setTargetName("mytext");

    SearchIndexerSkill ocrSkill = new OcrSkill(Arrays.asList(urlInput, queryInput),
        Collections.singletonList(textOutput))
        .setShouldDetectOrientation(true)
        .setName("myocr")
        .setDescription("Tested OCR skill")
        .setContext("/document");

    return new SearchIndexerSkillset(testResourceNamer.randomName("ocr-skillset", 32))
        .setDescription("Skillset for testing default configuration")
        .setSkills(Collections.singletonList(ocrSkill));
}
/** Builds the baseline test indexer: randomized name plus a daily indexing schedule. */
SearchIndexer createBaseTestIndexerObject(String targetIndexName, String dataSourceName) {
    String indexerName = testResourceNamer.randomName("indexer", 32);
    SearchIndexer indexer = new SearchIndexer(indexerName, dataSourceName, targetIndexName);
    return indexer.setSchedule(new IndexingSchedule(Duration.ofDays(1)));
}
/**
 * This index contains fields that are declared on the live data source we use to test the indexers
 *
 * @return the newly created Index object
 */
SearchIndex createTestIndexForLiveDatasource() {
    return new SearchIndex(testResourceNamer.randomName(IndexersManagementSyncTests.TARGET_INDEX_NAME, 32))
        .setFields(Arrays.asList(
            // Filter-only field.
            new SearchField("county_name", SearchFieldDataType.STRING)
                .setSearchable(Boolean.FALSE)
                .setFilterable(Boolean.TRUE),
            // Searchable and filterable.
            new SearchField("state", SearchFieldDataType.STRING)
                .setSearchable(Boolean.TRUE)
                .setFilterable(Boolean.TRUE),
            // Key field of the index.
            new SearchField("feature_id", SearchFieldDataType.STRING)
                .setKey(Boolean.TRUE)
                .setSearchable(Boolean.TRUE)
                .setFilterable(Boolean.FALSE)));
}
/**
 * Variant of the base test indexer whose description differs, used by update tests.
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentDescription(String targetIndexName, String dataSourceName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    return indexer.setDescription("somethingdifferent");
}
/**
 * Variant of the base test indexer carrying a single field mapping
 * (source "state_alpha" -> target "state"), used by update tests.
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentFieldMapping(String targetIndexName, String dataSourceName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    FieldMapping stateMapping = new FieldMapping("state_alpha").setTargetFieldName("state");
    indexer.setFieldMappings(Collections.singletonList(stateMapping));
    return indexer;
}
/**
 * Create a new indexer with the disabled flag explicitly set to {@code false}.
 * NOTE(review): the previous javadoc claimed the property is set to {@code true}, which
 * contradicted the code; the comment has been corrected to match the implementation.
 *
 * @return the created indexer
 */
SearchIndexer createDisabledIndexer(String targetIndexName, String dataSourceName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    indexer.setIsDisabled(false);
    return indexer;
}
/**
 * Variant of the base test indexer scheduled every 10 minutes instead of daily,
 * used by update tests.
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentSchedule(String targetIndexName, String dataSourceName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    indexer.setSchedule(new IndexingSchedule(Duration.ofMinutes(10)));
    return indexer;
}
/**
 * Variant of the base test indexer referencing the given skillset, used by update tests.
 *
 * @return the created indexer
 */
SearchIndexer createIndexerWithDifferentSkillset(String targetIndexName, String dataSourceName, String skillsetName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    return indexer.setSkillsetName(skillsetName);
}
/**
 * Mutates the given indexer in place, replacing its indexing parameters with custom
 * batch size and failure tolerances; used by update tests.
 *
 * @return the same indexer instance, with parameters replaced
 */
SearchIndexer createIndexerWithDifferentIndexingParameters(SearchIndexer indexer) {
    indexer.setParameters(new IndexingParameters()
        .setMaxFailedItems(121)
        .setMaxFailedItemsPerBatch(11)
        .setBatchSize(20));
    return indexer;
}
/** Builds a base test indexer carrying blob-storage-specific indexing configuration. */
SearchIndexer createIndexerWithStorageConfig(String targetIndexName, String dataSourceName) {
    SearchIndexer indexer = createBaseTestIndexerObject(targetIndexName, dataSourceName);
    HashMap<String, Object> blobConfig = new HashMap<>();
    blobConfig.put("indexedFileNameExtensions", ".pdf,.docx");
    blobConfig.put("excludedFileNameExtensions", ".xlsx");
    blobConfig.put("dataToExtract", "storageMetadata");
    blobConfig.put("failOnUnsupportedContentType", false);
    indexer.setParameters(new IndexingParameters().setConfiguration(blobConfig));
    return indexer;
}
// Copies the service-assigned schedule start time from the actual indexer onto the
// expected one so that assertObjectEquals can compare the rest of the definition.
void setSameStartTime(SearchIndexer expected, SearchIndexer actual) {
    expected.getSchedule().setStartTime(actual.getSchedule().getStartTime());
}
// Asserts an execution result carries non-null start/end timestamps that are not
// simply "now" (i.e. they came from the recorded/mocked execution, not the clock).
void assertStartAndEndTimeValid(IndexerExecutionResult result) {
    assertNotNull(result.getStartTime());
    assertNotEquals(OffsetDateTime.now(), result.getStartTime());
    assertNotNull(result.getEndTime());
    assertNotEquals(OffsetDateTime.now(), result.getEndTime());
}
// Validates the canned indexer status served by MOCK_STATUS_PIPELINE_POLICY: an
// in-progress last result plus a three-entry execution history, newest first
// (transient failure, reset, success with detailed errors/warnings).
//
// NOTE(review): several expected string literals below are visibly truncated
// ("https:" and "Enrichment.LanguageDetectionSkill." are unterminated) — it looks
// like everything after "//" or "#" was stripped when this file was extracted.
// The full expected values must be restored from the original test before this
// method can compile; the code is left unchanged here.
void assertValidSearchIndexerStatus(SearchIndexerStatus indexerExecutionInfo) {
    assertEquals(IndexerExecutionStatus.IN_PROGRESS, indexerExecutionInfo.getLastResult().getStatus());
    assertEquals(3, indexerExecutionInfo.getExecutionHistory().size());
    SearchIndexerLimits limits = indexerExecutionInfo.getLimits();
    assertNotNull(limits);
    assertEquals(100000, limits.getMaxDocumentContentCharactersToExtract(), 0);
    assertEquals(1000, limits.getMaxDocumentExtractionSize(), 0);
    // History is ordered newest-first.
    IndexerExecutionResult newestResult = indexerExecutionInfo.getExecutionHistory().get(0);
    IndexerExecutionResult middleResult = indexerExecutionInfo.getExecutionHistory().get(1);
    IndexerExecutionResult oldestResult = indexerExecutionInfo.getExecutionHistory().get(2);
    assertEquals(IndexerExecutionStatus.TRANSIENT_FAILURE, newestResult.getStatus());
    assertEquals("The indexer could not connect to the data source",
        newestResult.getErrorMessage());
    assertStartAndEndTimeValid(newestResult);
    assertEquals(IndexerExecutionStatus.RESET, middleResult.getStatus());
    assertStartAndEndTimeValid(middleResult);
    assertEquals(IndexerExecutionStatus.SUCCESS, oldestResult.getStatus());
    assertEquals(124876, oldestResult.getItemCount());
    assertEquals(2, oldestResult.getFailedItemCount());
    assertEquals("100", oldestResult.getInitialTrackingState());
    assertEquals("200", oldestResult.getFinalTrackingState());
    assertStartAndEndTimeValid(oldestResult);
    // First error entry of the successful run.
    assertEquals(2, oldestResult.getErrors().size());
    assertEquals("1", oldestResult.getErrors().get(0).getKey());
    assertEquals("Key field contains unsafe characters",
        oldestResult.getErrors().get(0).getErrorMessage());
    assertEquals("DocumentExtraction.AzureBlob.MyDataSource",
        oldestResult.getErrors().get(0).getName());
    assertEquals("The file could not be parsed.", oldestResult.getErrors().get(0).getDetails());
    // NOTE(review): truncated literal — restore the full documentation link URL.
    assertEquals("https:
    oldestResult.getErrors().get(0).getDocumentationLink());
    // Second error entry.
    assertEquals("121713", oldestResult.getErrors().get(1).getKey());
    assertEquals("Item is too large", oldestResult.getErrors().get(1).getErrorMessage());
    assertEquals("DocumentExtraction.AzureBlob.DataReader",
        oldestResult.getErrors().get(1).getName());
    assertEquals("Blob size cannot exceed 256 MB.", oldestResult.getErrors().get(1).getDetails());
    // NOTE(review): truncated literal — restore the full documentation link URL.
    assertEquals("https:
    oldestResult.getErrors().get(1).getDocumentationLink());
    // Single warning entry.
    assertEquals(1, oldestResult.getWarnings().size());
    assertEquals("2", oldestResult.getWarnings().get(0).getKey());
    assertEquals("Document was truncated to 50000 characters.",
        oldestResult.getWarnings().get(0).getMessage());
    // NOTE(review): truncated literal — restore the full warning source name.
    assertEquals("Enrichment.LanguageDetectionSkill.
    oldestResult.getWarnings().get(0).getName());
    assertEquals("Try to split the input into smaller chunks using Split skill.",
        oldestResult.getWarnings().get(0).getDetails());
    // NOTE(review): truncated literal — restore the full documentation link URL.
    assertEquals("https:
    oldestResult.getWarnings().get(0).getDocumentationLink());
}
} |
The check doesn't match the documentation. This throws an NPE rather than IllegalArgumentException when lockToken is null. | public Mono<Instant> renewMessageLock(String lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'lockToken' cannot be null."));
} else if (lockToken.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'lockToken' cannot be empty."));
} else if (receiverOptions.isSessionReceiver()) {
return monoError(logger, new IllegalStateException(
String.format("Cannot renew message lock [%s] for a session receiver.", lockToken)));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(lockToken, getLinkName(null)))
.map(instant -> managementNodeLocks.addOrUpdate(lockToken, instant));
} | } else if (Objects.isNull(lockToken)) { | public Mono<Instant> renewMessageLock(String lockToken) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock")));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'lockToken' cannot be null."));
} else if (lockToken.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'lockToken' cannot be empty."));
} else if (receiverOptions.isSessionReceiver()) {
return monoError(logger, new IllegalStateException(
String.format("Cannot renew message lock [%s] for a session receiver.", lockToken)));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(lockToken, getLinkName(null)))
.map(instant -> managementNodeLocks.addOrUpdate(lockToken, instant));
} | class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Default dead-letter options used when the caller supplies none.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
// Link name used when addressing the transaction coordinator.
private static final String TRANSACTION_LINK_NAME = "coordinator";
// Guards close(): flips to true exactly once when the client is disposed.
private final AtomicBoolean isDisposed = new AtomicBoolean();
// Tracks lock tokens renewed/settled via the management node.
private final MessageLockContainer managementNodeLocks;
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final ReceiverOptions receiverOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
// Invoked when this client completes/closes, e.g. to let the builder release shared resources.
private final Runnable onClientClose;
// Non-null only for the session-enabled construction path; null otherwise.
private final UnnamedSessionManager unnamedSessionManager;
// Sequence number of the most recently peeked message; -1 until the first peek.
private final AtomicLong lastPeekedSequenceNumber = new AtomicLong(-1);
private final AtomicReference<ServiceBusAsyncConsumer> consumer = new AtomicReference<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param receiverOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param cleanupInterval Interval handed to the lock container that tracks message locks.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param onClientClose Operation to run when the client completes.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    ReceiverOptions receiverOptions, ServiceBusConnectionProcessor connectionProcessor, Duration cleanupInterval,
    TracerProvider tracerProvider, MessageSerializer messageSerializer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    this.entityType = Objects.requireNonNull(entityType, "'entityType' cannot be null.");
    // Fix: the message previously quoted the whole sentence and misnamed the parameter
    // ("'receiveOptions cannot be null.'").
    this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiverOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null.");
    this.managementNodeLocks = new MessageLockContainer(cleanupInterval);
    // Non-session construction path: no session manager.
    this.unnamedSessionManager = null;
}
/**
 * Creates a session-enabled receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param receiverOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param cleanupInterval Interval handed to the lock container that tracks message locks.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param onClientClose Operation to run when the client completes.
 * @param unnamedSessionManager Manages session-enabled receive links.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    ReceiverOptions receiverOptions, ServiceBusConnectionProcessor connectionProcessor, Duration cleanupInterval,
    TracerProvider tracerProvider, MessageSerializer messageSerializer, Runnable onClientClose,
    UnnamedSessionManager unnamedSessionManager) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    this.entityType = Objects.requireNonNull(entityType, "'entityType' cannot be null.");
    // Fix: the message previously quoted the whole sentence and misnamed the parameter
    // ("'receiveOptions cannot be null.'").
    this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiverOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null.");
    this.unnamedSessionManager = Objects.requireNonNull(unnamedSessionManager, "'sessionManager' cannot be null.");
    this.managementNodeLocks = new MessageLockContainer(cleanupInterval);
}
/**
 * Returns the fully qualified Service Bus namespace this client is connected to,
 * typically of the form {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace that the connection is associated with.
 */
public String getFullyQualifiedNamespace() {
    return this.fullyQualifiedNamespace;
}
/**
 * Returns the path of the Service Bus entity (queue or topic subscription) this client
 * interacts with.
 *
 * @return The Service Bus resource this client interacts with.
 */
public String getEntityPath() {
    return this.entityPath;
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus abandon operation completes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode. (NOTE(review): the original link target was truncated in this source; RECEIVE_AND_DELETE is assumed
 *     — confirm against the published javadoc.)
 * @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
 */
public Mono<Void> abandon(String lockToken) {
    // Delegates to the session-aware overload using this receiver's configured session id.
    return abandon(lockToken, receiverOptions.getSessionId());
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token. This will make the message available
 * again for processing. Abandoning a message will increase the delivery count on the message.
 *
 * @param lockToken Lock token of the message.
 * @param sessionId Session id of the message to abandon. {@code null} if there is no session.
 *
 * @return A {@link Mono} that completes when the Service Bus abandon operation completes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode. (NOTE(review): the original link target was truncated in this source; RECEIVE_AND_DELETE is assumed
 *     — confirm against the published javadoc.)
 * @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
 */
public Mono<Void> abandon(String lockToken, String sessionId) {
    // Delegates to the overload that also accepts properties to modify (none here).
    return abandon(lockToken, null, sessionId);
}
/**
 * Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
 * This will make the message available again for processing. Abandoning a message will increase the delivery count
 * on the message.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode#RECEIVE_AND_DELETE}
 *     mode. (NOTE(review): the original link target was truncated in this source; RECEIVE_AND_DELETE is assumed
 *     — confirm against the published javadoc.)
 * @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
 */
public Mono<Void> abandon(String lockToken, Map<String, Object> propertiesToModify) {
    // Delegates to the session-aware overload using this receiver's configured session id.
    return abandon(lockToken, propertiesToModify, receiverOptions.getSessionId());
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
* <p><strong>Complete a message with a transaction</strong></p>
* {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.abandonMessageWithTransaction}
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the Service Bus operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> abandon(String lockToken, Map<String, Object> propertiesToModify,
ServiceBusTransactionContext transactionContext) {
return abandon(lockToken, propertiesToModify, receiverOptions.getSessionId(), transactionContext);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
* @param sessionId Session id of the message to abandon. {@code null} if there is no session.
*
* @return A {@link Mono} that completes when the Service Bus abandon operation completes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> abandon(String lockToken, Map<String, Object> propertiesToModify, String sessionId) {
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null,
propertiesToModify, sessionId, null);
}
/**
* Abandon a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties.
* This will make the message available again for processing. Abandoning a message will increase the delivery count
* on the message.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Properties to modify on the message.
* @param sessionId Session id of the message to abandon. {@code null} if there is no session.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the Service Bus abandon operation completes.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> abandon(String lockToken, Map<String, Object> propertiesToModify, String sessionId,
ServiceBusTransactionContext transactionContext) {
if (Objects.isNull(transactionContext)) {
return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
} else if (Objects.isNull(transactionContext.getTransactionId())) {
return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null,
propertiesToModify, sessionId, transactionContext);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that finishes when the message is completed on Service Bus.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> complete(String lockToken) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null,
null, receiverOptions.getSessionId(), null);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
* <p><strong>Complete a message with a transaction</strong></p>
* {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.completeMessageWithTransaction}
*
* @param lockToken Lock token of the message.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that finishes when the message is completed on Service Bus.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> complete(String lockToken, ServiceBusTransactionContext transactionContext) {
return complete(lockToken, receiverOptions.getSessionId(), transactionContext);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
* @param sessionId Session id of the message to complete. {@code null} if there is no session.
*
* @return A {@link Mono} that finishes when the message is completed on Service Bus.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> complete(String lockToken, String sessionId) {
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null,
null, sessionId, null);
}
/**
* Completes a {@link ServiceBusReceivedMessage message} using its lock token. This will delete the message from the
* service.
*
* @param lockToken Lock token of the message.
* @param sessionId Session id of the message to complete. {@code null} if there is no session.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that finishes when the message is completed on Service Bus.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> complete(String lockToken, String sessionId,
ServiceBusTransactionContext transactionContext) {
if (Objects.isNull(transactionContext)) {
return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
} else if (Objects.isNull(transactionContext.getTransactionId())) {
return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null,
null, sessionId, transactionContext);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the Service Bus defer operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken) {
return defer(lockToken, receiverOptions.getSessionId());
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token. This will move message into the deferred
* subqueue.
*
* @param lockToken Lock token of the message.
* @param sessionId Session id of the message to defer. {@code null} if there is no session.
*
* @return A {@link Mono} that completes when the defer operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken, String sessionId) {
return defer(lockToken, null, sessionId);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
*
* @return A {@link Mono} that completes when the defer operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken, Map<String, Object> propertiesToModify) {
return defer(lockToken, propertiesToModify, receiverOptions.getSessionId());
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the defer operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken, Map<String, Object> propertiesToModify,
ServiceBusTransactionContext transactionContext) {
return defer(lockToken, propertiesToModify, receiverOptions.getSessionId(), transactionContext);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
* @param sessionId Session id of the message to defer. {@code null} if there is no session.
*
* @return A {@link Mono} that completes when the Service Bus defer operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken, Map<String, Object> propertiesToModify, String sessionId) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null,
propertiesToModify, sessionId, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
* @param sessionId Session id of the message to defer. {@code null} if there is no session.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the Service Bus defer operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken, Map<String, Object> propertiesToModify, String sessionId,
ServiceBusTransactionContext transactionContext) {
if (Objects.isNull(transactionContext)) {
return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
} else if (Objects.isNull(transactionContext.getTransactionId())) {
return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null,
propertiesToModify, sessionId, transactionContext);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(String lockToken) {
return deadLetter(lockToken, receiverOptions.getSessionId());
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
* @param sessionId Session id of the message to deadletter. {@code null} if there is no session.
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(String lockToken, String sessionId) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS, sessionId);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
* @param sessionId Session id of the message to deadletter. {@code null} if there is no session.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
     * @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(String lockToken, String sessionId,
ServiceBusTransactionContext transactionContext) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS, sessionId, transactionContext);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> deadLetter(String lockToken, DeadLetterOptions deadLetterOptions) {
return deadLetter(lockToken, deadLetterOptions, receiverOptions.getSessionId());
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code deadLetterOptions}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> deadLetter(String lockToken, DeadLetterOptions deadLetterOptions,
ServiceBusTransactionContext transactionContext) {
return deadLetter(lockToken, deadLetterOptions, receiverOptions.getSessionId(), transactionContext);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
* @param sessionId Session id of the message to deadletter. {@code null} if there is no session.
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> deadLetter(String lockToken, DeadLetterOptions deadLetterOptions, String sessionId) {
if (Objects.isNull(deadLetterOptions)) {
return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify(), sessionId,
null);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
* @param sessionId Session id of the message to deadletter. {@code null} if there is no session.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
public Mono<Void> deadLetter(String lockToken, DeadLetterOptions deadLetterOptions, String sessionId,
ServiceBusTransactionContext transactionContext) {
if (Objects.isNull(transactionContext)) {
return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
} else if (Objects.isNull(transactionContext.getTransactionId())) {
return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify(), sessionId,
transactionContext);
}
/**
* Gets the state of a session given its identifier.
*
* @param sessionId Identifier of session to get.
*
* @return The session state or an empty Mono if there is no state set for the session.
* @throws IllegalStateException if the receiver is a non-session receiver.
*/
public Mono<byte[]> getSessionState(String sessionId) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "getSessionState")));
} else if (!receiverOptions.isSessionReceiver()) {
return monoError(logger, new IllegalStateException("Cannot get session state on a non-session receiver."));
}
if (unnamedSessionManager != null) {
return unnamedSessionManager.getSessionState(sessionId);
} else {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(channel -> channel.getSessionState(sessionId, getLinkName(sessionId)));
}
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peek() {
return peek(receiverOptions.getSessionId());
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @param sessionId Session id of the message to peek from. {@code null} if there is no session.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
    public Mono<ServiceBusReceivedMessage> peek(String sessionId) {
        if (isDisposed.get()) {
            return monoError(logger, new IllegalStateException(
                String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
        }
        return connectionProcessor
            .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(channel -> {
                // Resume one past the last sequence number this client has already peeked.
                final long sequence = lastPeekedSequenceNumber.get() + 1;
                logger.verbose("Peek message from sequence number: {}", sequence);
                return channel.peek(sequence, sessionId, getLinkName(sessionId));
            })
            .handle((message, sink) -> {
                // Advance the peek cursor monotonically (Math.max) so it never moves backwards,
                // then emit the peeked message downstream.
                final long current = lastPeekedSequenceNumber
                    .updateAndGet(value -> Math.max(value, message.getSequenceNumber()));
                logger.verbose("Updating last peeked sequence number: {}", current);
                sink.next(message);
            });
    }
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
return peekAt(sequenceNumber, receiverOptions.getSessionId());
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
* @param sessionId Session id of the message to peek from. {@code null} if there is no session.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber, String sessionId) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.peek(sequenceNumber, sessionId, getLinkName(sessionId)));
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
return peekBatch(maxMessages, receiverOptions.getSessionId());
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sessionId Session id of the messages to peek from. {@code null} if there is no session.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
    public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages, String sessionId) {
        if (isDisposed.get()) {
            return fluxError(logger, new IllegalStateException(
                String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
        }
        return connectionProcessor
            .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMapMany(node -> {
                // Resume one past the last sequence number this client has already peeked.
                final long nextSequenceNumber = lastPeekedSequenceNumber.get() + 1;
                logger.verbose("Peek batch from sequence number: {}", nextSequenceNumber);
                final Flux<ServiceBusReceivedMessage> messages =
                    node.peek(nextSequenceNumber, sessionId, getLinkName(sessionId), maxMessages);
                // Side-channel that records the last sequence number of the batch. When the
                // batch is empty, a sentinel message carrying the current cursor value keeps
                // last() from erroring; the handle sink completes without emitting, so the
                // sentinel never reaches subscribers.
                final Mono<ServiceBusReceivedMessage> handle = messages
                    .switchIfEmpty(Mono.fromCallable(() -> {
                        ServiceBusReceivedMessage emptyMessage = new ServiceBusReceivedMessage(new byte[0]);
                        emptyMessage.setSequenceNumber(lastPeekedSequenceNumber.get());
                        return emptyMessage;
                    }))
                    .last()
                    .handle((last, sink) -> {
                        // Monotonic update (Math.max): the peek cursor never moves backwards.
                        final long current = lastPeekedSequenceNumber
                            .updateAndGet(value -> Math.max(value, last.getSequenceNumber()));
                        logger.verbose("Last peeked sequence number in batch: {}", current);
                        sink.complete();
                    });
                // Merge so the cursor update participates in the returned stream's lifecycle.
                return Flux.merge(messages, handle);
            });
    }
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
return peekBatchAt(maxMessages, sequenceNumber, receiverOptions.getSessionId());
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
* @param sessionId Session id of the messages to peek from. {@code null} if there is no session.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber, String sessionId) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peek(sequenceNumber, sessionId, getLinkName(sessionId), maxMessages));
}
/**
* Receives an <b>infinite</b> stream of {@link ServiceBusReceivedMessage messages} from the Service Bus
* entity. This Flux continuously receives messages from a Service Bus entity until either:
*
* <ul>
* <li>The receiver is closed.</li>
* <li>The subscription to the Flux is disposed.</li>
* <li>A terminal signal from a downstream subscriber is propagated upstream (ie. {@link Flux
* {@link Flux
* <li>An {@link AmqpException} occurs that causes the receive link to stop.</li>
* </ul>
*
* @return An <b>infinite</b> stream of messages from the Service Bus entity.
*/
public Flux<ServiceBusReceivedMessageContext> receive() {
if (unnamedSessionManager != null) {
return unnamedSessionManager.receive();
} else {
return getOrCreateConsumer().receive().map(ServiceBusReceivedMessageContext::new);
}
}
/**
* Receives a bounded stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity. This stream
* receives either {@code maxNumberOfMessages} are received or the {@code maxWaitTime} has elapsed.
*
* @param maxNumberOfMessages Maximum number of messages to receive.
* @param maxWaitTime Maximum time to wait.
*
* @return A bounded {@link Flux} of messages.
* @throws NullPointerException if {@code maxWaitTime} is null.
* @throws IllegalArgumentException if {@code maxNumberOfMessages} is less than 1. {@code maxWaitTime} is zero
* or a negative duration.
*/
public Flux<ServiceBusReceivedMessageContext> receive(int maxNumberOfMessages, Duration maxWaitTime) {
if (maxNumberOfMessages < 1) {
return fluxError(logger, new IllegalArgumentException("'maxNumberOfMessages' cannot be less than 1."));
} else if (maxWaitTime == null) {
return fluxError(logger, new NullPointerException("'maxWaitTime' cannot be null."));
} else if (maxWaitTime.isNegative() || maxWaitTime.isZero()) {
return fluxError(logger, new NullPointerException("'maxWaitTime' cannot be negative or zero."));
}
return receive().take(maxNumberOfMessages).take(maxWaitTime);
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return receiveDeferredMessage(sequenceNumber, receiverOptions.getSessionId());
}
/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
 * sequence number.
 *
 * @param sequenceNumber The sequence number of the deferred message.
 * @param sessionId Session id of the deferred message. {@code null} if there is no session.
 *
 * @return A deferred message with the matching {@code sequenceNumber}.
 * @throws IllegalStateException if the receiver has been disposed.
 */
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber, String sessionId) {
    if (isDisposed.get()) {
        // Fixed for consistency: receiveDeferredMessageBatch fails fast once the receiver is closed; the
        // single-message overload previously did not.
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessage")));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.receiveDeferredMessages(receiverOptions.getReceiveMode(),
            sessionId, getLinkName(sessionId), Collections.singleton(sequenceNumber)).last())
        .map(receivedMessage -> {
            if (CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
                return receivedMessage;
            }
            // Track the lock so later settlement/renewal through the management node can find it.
            if (receiverOptions.getReceiveMode() == ReceiveMode.PEEK_LOCK) {
                receivedMessage.setLockedUntil(managementNodeLocks.addOrUpdate(receivedMessage.getLockToken(),
                    receivedMessage.getLockedUntil()));
            }
            return receivedMessage;
        });
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages} by their sequence numbers. Only
 * deferred messages can be retrieved this way.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 *
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(Iterable<Long> sequenceNumbers) {
    // Delegate using the session id configured on this receiver (null for sessionless receivers).
    final String configuredSessionId = receiverOptions.getSessionId();
    return receiveDeferredMessageBatch(sequenceNumbers, configuredSessionId);
}
/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages} by their sequence numbers. Only
 * deferred messages can be retrieved this way.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 * @param sessionId Session id of the deferred messages. {@code null} if there is no session.
 *
 * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
 * @throws IllegalStateException if the receiver has been disposed.
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(Iterable<Long> sequenceNumbers,
    String sessionId) {
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
    }

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(managementNode -> managementNode.receiveDeferredMessages(receiverOptions.getReceiveMode(),
            sessionId, getLinkName(sessionId), sequenceNumbers))
        .map(deferredMessage -> {
            // Track the lock so later settlement/renewal through the management node can find it.
            final String token = deferredMessage.getLockToken();
            if (!CoreUtils.isNullOrEmpty(token) && receiverOptions.getReceiveMode() == ReceiveMode.PEEK_LOCK) {
                deferredMessage.setLockedUntil(managementNodeLocks.addOrUpdate(token,
                    deferredMessage.getLockedUntil()));
            }
            return deferredMessage;
        });
}
/**
* Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting specified
* on the entity. When a message is received in {@link ReceiveMode
* server for this receiver instance for a duration as specified during the Queue creation (LockDuration). If
* processing of the message requires longer than this duration, the lock needs to be renewed. For each renewal, the
* lock is reset to the entity's LockDuration value.
*
* @param lockToken Lock token of the message to renew.
*
* @return The new expiration time for the message.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalStateException if the receiver is a session receiver.
* @throws IllegalArgumentException if {@code lockToken} is {@code null} or an empty value.
*/
/**
 * Renews the lock on the session with the given identifier.
 *
 * <p>NOTE(review): the previous javadoc ("Sets the state of a session") was copied from
 * {@code setSessionState} and did not describe this method.</p>
 *
 * @param sessionId Identifier of the session whose lock to renew.
 *
 * @return The next expiration time for the session lock.
 * @throws IllegalStateException if the receiver has been disposed, or is a non-session receiver.
 */
public Mono<Instant> renewSessionLock(String sessionId) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewSessionLock")));
    } else if (!receiverOptions.isSessionReceiver()) {
        return monoError(logger, new IllegalStateException("Cannot renew session lock on a non-session receiver."));
    }

    // Unnamed-session receivers renew through the link tied to this session; otherwise no link name is needed
    // and the management node resolves the session itself.
    final String linkName = unnamedSessionManager != null
        ? unnamedSessionManager.getLinkName(sessionId)
        : null;

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(channel -> channel.renewSessionLock(sessionId, linkName));
}
/**
 * Sets the state of a session given its identifier.
 *
 * @param sessionId Identifier of the session to update.
 * @param sessionState State to set on the session.
 *
 * @return A Mono that completes when the session state is set.
 * @throws IllegalStateException if the receiver has been disposed, or is a non-session receiver.
 */
public Mono<Void> setSessionState(String sessionId, byte[] sessionState) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "setSessionState")));
    }
    if (!receiverOptions.isSessionReceiver()) {
        return monoError(logger, new IllegalStateException("Cannot set session state on a non-session receiver."));
    }

    // Unnamed-session receivers route through the link tied to this session; otherwise no link name is needed.
    final String associatedLinkName = unnamedSessionManager == null
        ? null
        : unnamedSessionManager.getLinkName(sessionId);

    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(managementNode -> managementNode.setSessionState(sessionId, sessionState, associatedLinkName));
}
/**
 * Starts a new service-side transaction. The returned {@link ServiceBusTransactionContext} should be passed to
 * every operation that must take part in this transaction.
 *
 * <p><strong>Create a transaction</strong></p>
 * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.createTransaction}
 *
 * @return The {@link Mono} that finishes this operation on service bus resource.
 * @throws IllegalStateException if the receiver has been disposed.
 */
public Mono<ServiceBusTransactionContext> createTransaction() {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "createTransaction")));
    }

    // Transactions are coordinated over a dedicated AMQP session.
    return connectionProcessor
        .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME))
        .flatMap(session -> session.createTransaction())
        .map(amqpTransaction -> new ServiceBusTransactionContext(amqpTransaction.getTransactionId()));
}
/**
 * Commits the transaction given {@link ServiceBusTransactionContext}. This will make a call to Service Bus.
 *
 * <p><strong>Commit a transaction</strong></p>
 * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.commitTransaction}
 *
 * @param transactionContext The transaction to commit.
 * @throws NullPointerException if {@code transactionContext} or {@code transactionContext.transactionId} is null.
 *
 * @return The {@link Mono} that finishes this operation on service bus resource.
 */
public Mono<Void> commitTransaction(ServiceBusTransactionContext transactionContext) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "commitTransaction")));
    }
    if (transactionContext == null) {
        return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
    }
    if (transactionContext.getTransactionId() == null) {
        return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
    }

    final AmqpTransaction amqpTransaction = new AmqpTransaction(transactionContext.getTransactionId());
    return connectionProcessor
        .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME))
        .flatMap(session -> session.commitTransaction(amqpTransaction));
}
/**
 * Rolls back the transaction given {@link ServiceBusTransactionContext}. This will make a call to Service Bus.
 *
 * <p><strong>Rollback a transaction</strong></p>
 * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.rollbackTransaction}
 *
 * @param transactionContext The transaction to roll back.
 * @throws NullPointerException if {@code transactionContext} or {@code transactionContext.transactionId} is null.
 *
 * @return The {@link Mono} that finishes this operation on service bus resource.
 */
public Mono<Void> rollbackTransaction(ServiceBusTransactionContext transactionContext) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "rollbackTransaction")));
    }
    if (transactionContext == null) {
        return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
    }
    if (transactionContext.getTransactionId() == null) {
        return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
    }

    final AmqpTransaction amqpTransaction = new AmqpTransaction(transactionContext.getTransactionId());
    return connectionProcessor
        .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME))
        .flatMap(session -> session.rollbackTransaction(amqpTransaction));
}
/**
 * Disposes of the consumer by closing the underlying connection to the service. Safe to call more than once;
 * subsequent calls are no-ops.
 */
@Override
public void close() {
    // getAndSet makes close() idempotent under concurrent callers.
    if (isDisposed.getAndSet(true)) {
        return;
    }

    logger.info("Removing receiver links.");
    final ServiceBusAsyncConsumer activeConsumer = consumer.getAndSet(null);
    if (activeConsumer != null) {
        activeConsumer.close();
    }

    if (unnamedSessionManager != null) {
        unnamedSessionManager.close();
    }

    // Let the owner (e.g. the builder/client that created us) release shared resources.
    onClientClose.run();
}
/**
 * Tells whether the management node currently holds the given message lock token (i.e. it has not expired and the
 * message was received through, or settled via, the management node).
 *
 * @param lockToken Lock token to check for.
 *
 * @return {@code true} if the management node contains the lock token; {@code false} otherwise.
 */
private boolean isManagementToken(String lockToken) {
    return managementNodeLocks.contains(lockToken);
}
/**
 * Applies a disposition (complete, abandon, defer, dead-letter) to the message identified by {@code lockToken}.
 *
 * <p>Routing: unnamed-session receivers first try the session manager's link; otherwise the message's receive
 * link is used unless the lock is only known to the management node (or no link exists), in which case the
 * operation goes over the management node.</p>
 *
 * @param lockToken Lock token of the message to settle.
 * @param dispositionStatus The disposition to apply.
 * @param deadLetterReason Optional reason when dead-lettering.
 * @param deadLetterErrorDescription Optional description when dead-lettering.
 * @param propertiesToModify Optional properties to modify on the message.
 * @param sessionId Session id of the message, or {@code null}.
 * @param transactionContext Optional transaction this operation takes part in.
 *
 * @return A {@link Mono} that completes when the disposition has been applied.
 */
private Mono<Void> updateDisposition(String lockToken, DispositionStatus dispositionStatus,
    String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify,
    String sessionId, ServiceBusTransactionContext transactionContext) {
    if (isDisposed.get()) {
        return monoError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
    } else if (Objects.isNull(lockToken)) {
        return monoError(logger, new NullPointerException("'lockToken' cannot be null."));
    } else if (lockToken.isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'lockToken' cannot be empty."));
    }

    // Settlement requires a lock, which only exists in PEEK_LOCK mode.
    if (receiverOptions.getReceiveMode() != ReceiveMode.PEEK_LOCK) {
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
    }

    // Fall back to the receiver's configured session id when the caller did not supply one.
    final String sessionIdToUse;
    if (sessionId == null && !CoreUtils.isNullOrEmpty(receiverOptions.getSessionId())) {
        sessionIdToUse = receiverOptions.getSessionId();
    } else {
        sessionIdToUse = sessionId;
    }

    logger.info("{}: Update started. Disposition: {}. Lock: {}. SessionId {}.", entityPath, dispositionStatus,
        lockToken, sessionIdToUse);

    // Fixed: pass the resolved sessionIdToUse to the management node and the link lookup. Previously the raw
    // sessionId argument was used, so the configured fallback was computed but only ever logged.
    final Mono<Void> performOnManagement = connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(node -> node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
            deadLetterErrorDescription, propertiesToModify, sessionIdToUse, getLinkName(sessionIdToUse),
            transactionContext))
        .then(Mono.fromRunnable(() -> {
            logger.info("{}: Management node Update completed. Disposition: {}. Lock: {}.",
                entityPath, dispositionStatus, lockToken);

            // Once settled, the management node no longer holds this lock.
            managementNodeLocks.remove(lockToken);
        }));

    if (unnamedSessionManager != null) {
        return unnamedSessionManager.updateDisposition(lockToken, sessionIdToUse, dispositionStatus,
            propertiesToModify, deadLetterReason, deadLetterErrorDescription, transactionContext)
            .flatMap(isSuccess -> {
                if (isSuccess) {
                    return Mono.empty();
                }

                logger.info("Could not perform on session manger. Performing on management node.");
                return performOnManagement;
            });
    }

    final ServiceBusAsyncConsumer existingConsumer = consumer.get();
    if (isManagementToken(lockToken) || existingConsumer == null) {
        return performOnManagement;
    } else {
        return existingConsumer.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
            deadLetterErrorDescription, propertiesToModify, transactionContext)
            .then(Mono.fromRunnable(() -> logger.info("{}: Update completed. Disposition: {}. Lock: {}.",
                entityPath, dispositionStatus, lockToken)));
    }
}
/**
 * Returns the single active {@link ServiceBusAsyncConsumer}, creating it (and its underlying AMQP receive link)
 * on first use. Safe for concurrent callers: the loser of the compareAndSet race closes its extra consumer and
 * returns the winner's.
 */
private ServiceBusAsyncConsumer getOrCreateConsumer() {
    final ServiceBusAsyncConsumer existing = consumer.get();
    if (existing != null) {
        return existing;
    }

    final String linkName = StringUtil.getRandomString(entityPath);
    logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);

    // A link is created per connection emission; repeat() re-requests one after the previous link's stream
    // completes, so receiving can resume after reconnects.
    final Flux<ServiceBusReceiveLink> receiveLink = connectionProcessor.flatMap(connection -> {
        if (receiverOptions.isSessionReceiver()) {
            // Session-aware link scoped to the configured session id.
            return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(),
                null, entityType, receiverOptions.getSessionId());
        } else {
            return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(),
                null, entityType);
        }
    })
        .doOnNext(next -> {
            final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
                + " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
            logger.verbose(format, next.getEntityPath(), receiverOptions.getReceiveMode(),
                CoreUtils.isNullOrEmpty(receiverOptions.getSessionId()), "N/A", entityType);
        })
        .repeat();

    final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, linkName,
        null);
    final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
    // Processor that subscribes to the link stream with the configured prefetch count and retry policy.
    final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
        new ServiceBusReceiveLinkProcessor(receiverOptions.getPrefetchCount(), retryPolicy, connectionProcessor,
            context));
    // The consumer is handed a callback for renewing message locks through the management node.
    final ServiceBusAsyncConsumer newConsumer = new ServiceBusAsyncConsumer(linkName, linkMessageProcessor,
        messageSerializer, false, receiverOptions.autoLockRenewalEnabled(),
        receiverOptions.getMaxAutoLockRenewalDuration(), connectionProcessor.getRetryOptions(),
        (token, associatedLinkName) -> renewMessageLock(token, associatedLinkName));

    // Only one consumer may be active at a time.
    if (consumer.compareAndSet(null, newConsumer)) {
        return newConsumer;
    } else {
        newConsumer.close();
        return consumer.get();
    }
}
/**
 * Gets the receiver options this client was configured with.
 *
 * @return The receiver options set by the user.
 */
ReceiverOptions getReceiverOptions() {
    return receiverOptions;
}
/**
 * Renews the lock on a message through the management node.
 *
 * @param lockToken Lock token of the message to renew.
 * @param linkName Name of the receive link associated with the message, or {@code null}.
 *
 * @return The new lock expiration time for the message.
 */
private Mono<Instant> renewMessageLock(String lockToken, String linkName) {
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMap(managementNode -> managementNode.renewMessageLock(lockToken, linkName));
}
/**
 * Resolves the receive-link name to use for management operations. If the receiver has not connected via a
 * receive link, its operations have all gone through the management node and there is no link name.
 *
 * @param sessionId Session id to look up a link for; may be {@code null} or empty.
 *
 * @return The name of the receive link, or {@code null} if it has not connected via a receive link.
 */
private String getLinkName(String sessionId) {
    if (unnamedSessionManager != null && !CoreUtils.isNullOrEmpty(sessionId)) {
        // Unnamed-session receivers track a link per session id.
        return unnamedSessionManager.getLinkName(sessionId);
    } else if (!CoreUtils.isNullOrEmpty(sessionId) && !receiverOptions.isSessionReceiver()) {
        // A session id was supplied but this receiver is not session-aware: no link can match it.
        return null;
    } else {
        final ServiceBusAsyncConsumer existing = consumer.get();
        return existing != null ? existing.getLinkName() : null;
    }
}
}

class ServiceBusReceiverAsyncClient implements AutoCloseable {
// Defaults for dead-letter operations. NOTE(review): usage not visible in this chunk -- confirm against the
// dead-letter overloads elsewhere in the class.
private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
// Name of the AMQP session used as the transaction coordinator link (see createTransaction()).
private static final String TRANSACTION_LINK_NAME = "coordinator";

// Set once by close(); guards every operation after disposal.
private final AtomicBoolean isDisposed = new AtomicBoolean();
// Lock tokens of messages received or settled through the management node.
private final MessageLockContainer managementNodeLocks;
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
private final String fullyQualifiedNamespace;
private final String entityPath;
private final MessagingEntityType entityType;
private final ReceiverOptions receiverOptions;
private final ServiceBusConnectionProcessor connectionProcessor;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
// Invoked on close() so the owner can release shared resources.
private final Runnable onClientClose;
// Non-null only for receivers that accept the next available (unnamed) session.
private final UnnamedSessionManager unnamedSessionManager;
// Starts at -1, i.e. nothing peeked yet. NOTE(review): updated outside this chunk -- confirm semantics there.
private final AtomicLong lastPeekedSequenceNumber = new AtomicLong(-1);
// Lazily-created single consumer backing receive(); see getOrCreateConsumer().
private final AtomicReference<ServiceBusAsyncConsumer> consumer = new AtomicReference<>();
/**
 * Creates a receiver that listens to a Service Bus resource.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param receiverOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param cleanupInterval Interval handed to the management-node lock container for its periodic clean-up.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param onClientClose Operation to run when the client completes.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    ReceiverOptions receiverOptions, ServiceBusConnectionProcessor connectionProcessor, Duration cleanupInterval,
    TracerProvider tracerProvider, MessageSerializer messageSerializer, Runnable onClientClose) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    this.entityType = Objects.requireNonNull(entityType, "'entityType' cannot be null.");
    // Fixed: message previously read "'receiveOptions cannot be null.'" -- misspelled parameter name and
    // quotes misplaced inside the string.
    this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiverOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null.");

    this.managementNodeLocks = new MessageLockContainer(cleanupInterval);
    // This overload is for sessionless (or named-session) receivers; no unnamed-session manager.
    this.unnamedSessionManager = null;
}
/**
 * Creates a receiver that listens to a Service Bus resource and accepts unnamed sessions through the given
 * session manager.
 *
 * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
 * @param entityPath The name of the topic or queue.
 * @param entityType The type of the Service Bus resource.
 * @param receiverOptions Options when receiving messages.
 * @param connectionProcessor The AMQP connection to the Service Bus resource.
 * @param cleanupInterval Interval handed to the management-node lock container for its periodic clean-up.
 * @param tracerProvider Tracer for telemetry.
 * @param messageSerializer Serializes and deserializes Service Bus messages.
 * @param onClientClose Operation to run when the client completes.
 * @param unnamedSessionManager Manager for receivers that accept the next available session.
 */
ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType,
    ReceiverOptions receiverOptions, ServiceBusConnectionProcessor connectionProcessor, Duration cleanupInterval,
    TracerProvider tracerProvider, MessageSerializer messageSerializer, Runnable onClientClose,
    UnnamedSessionManager unnamedSessionManager) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
    this.entityType = Objects.requireNonNull(entityType, "'entityType' cannot be null.");
    // Fixed: message previously read "'receiveOptions cannot be null.'" -- misspelled parameter name and
    // quotes misplaced inside the string.
    this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiverOptions' cannot be null.");
    this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null.");
    this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
    this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
    this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null.");
    // Fixed: message previously named a non-existent 'sessionManager' parameter.
    this.unnamedSessionManager = Objects.requireNonNull(unnamedSessionManager,
        "'unnamedSessionManager' cannot be null.");
    this.managementNodeLocks = new MessageLockContainer(cleanupInterval);
}
/**
 * Gets the fully qualified Service Bus namespace that the connection is associated with, e.g.
 * {@code {yournamespace}.servicebus.windows.net}.
 *
 * @return The fully qualified Service Bus namespace that the connection is associated with.
 */
public String getFullyQualifiedNamespace() {
    return fullyQualifiedNamespace;
}
/**
 * Gets the Service Bus resource (queue or topic subscription path) this client interacts with.
 *
 * @return The Service Bus resource this client interacts with.
 */
public String getEntityPath() {
    return entityPath;
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} with its lock token, making it available again for
 * processing. Abandoning a message increases its delivery count.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus abandon operation completes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> abandon(String lockToken) {
    // Delegate using the session id configured on this receiver (null when sessionless).
    final String configuredSessionId = receiverOptions.getSessionId();
    return abandon(lockToken, configuredSessionId);
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} with its lock token, making it available again for
 * processing. Abandoning a message increases its delivery count.
 *
 * @param lockToken Lock token of the message.
 * @param sessionId Session id of the message to abandon. {@code null} if there is no session.
 *
 * @return A {@link Mono} that completes when the Service Bus abandon operation completes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> abandon(String lockToken, String sessionId) {
    // No properties to modify on this overload.
    final Map<String, Object> noProperties = null;
    return abandon(lockToken, noProperties, sessionId);
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties,
 * making it available again for processing. Abandoning a message increases its delivery count.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> abandon(String lockToken, Map<String, Object> propertiesToModify) {
    // Delegate using the session id configured on this receiver (null when sessionless).
    final String configuredSessionId = receiverOptions.getSessionId();
    return abandon(lockToken, propertiesToModify, configuredSessionId);
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties,
 * making it available again for processing. Abandoning a message increases its delivery count.
 *
 * <p><strong>Abandon a message with a transaction</strong></p>
 * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.abandonMessageWithTransaction}
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 * @param transactionContext The transaction this operation takes part in; create it first via
 *     {@code createTransaction()} on this client or the sender client.
 *
 * @return A {@link Mono} that completes when the Service Bus operation finishes.
 * @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
 *     {@code transactionContext.transactionId} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> abandon(String lockToken, Map<String, Object> propertiesToModify,
    ServiceBusTransactionContext transactionContext) {
    // Delegate using the session id configured on this receiver (null when sessionless).
    final String configuredSessionId = receiverOptions.getSessionId();
    return abandon(lockToken, propertiesToModify, configuredSessionId, transactionContext);
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties,
 * making it available again for processing. Abandoning a message increases its delivery count.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 * @param sessionId Session id of the message to abandon. {@code null} if there is no session.
 *
 * @return A {@link Mono} that completes when the Service Bus abandon operation completes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> abandon(String lockToken, Map<String, Object> propertiesToModify, String sessionId) {
    // No dead-letter reason/description and no transaction for a plain abandon.
    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify, sessionId,
        null);
}
/**
 * Abandons a {@link ServiceBusReceivedMessage message} with its lock token and updates the message's properties,
 * making it available again for processing. Abandoning a message increases its delivery count.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Properties to modify on the message.
 * @param sessionId Session id of the message to abandon. {@code null} if there is no session.
 * @param transactionContext The transaction this operation takes part in; create it first via
 *     {@code createTransaction()} on this client or the sender client.
 *
 * @return A {@link Mono} that completes when the Service Bus abandon operation completes.
 * @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
 *     {@code transactionContext.transactionId} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> abandon(String lockToken, Map<String, Object> propertiesToModify, String sessionId,
    ServiceBusTransactionContext transactionContext) {
    if (transactionContext == null) {
        return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
    }
    if (transactionContext.getTransactionId() == null) {
        return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
    }

    return updateDisposition(lockToken, DispositionStatus.ABANDONED, null, null, propertiesToModify, sessionId,
        transactionContext);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This deletes the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that finishes when the message is completed on Service Bus.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> complete(String lockToken) {
    // Delegate using the session id configured on this receiver (null when sessionless).
    final String configuredSessionId = receiverOptions.getSessionId();
    return complete(lockToken, configuredSessionId);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This deletes the message from the
 * service.
 *
 * <p><strong>Complete a message with a transaction</strong></p>
 * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.completeMessageWithTransaction}
 *
 * @param lockToken Lock token of the message.
 * @param transactionContext The transaction this operation takes part in; create it first via
 *     {@code createTransaction()} on this client or the sender client.
 *
 * @return A {@link Mono} that finishes when the message is completed on Service Bus.
 * @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
 *     {@code transactionContext.transactionId} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> complete(String lockToken, ServiceBusTransactionContext transactionContext) {
    // Delegate using the session id configured on this receiver (null when sessionless).
    final String configuredSessionId = receiverOptions.getSessionId();
    return complete(lockToken, configuredSessionId, transactionContext);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This deletes the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 * @param sessionId Session id of the message to complete. {@code null} if there is no session.
 *
 * @return A {@link Mono} that finishes when the message is completed on Service Bus.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> complete(String lockToken, String sessionId) {
    // No properties, dead-letter details, or transaction for a plain complete.
    return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null, sessionId, null);
}
/**
 * Completes a {@link ServiceBusReceivedMessage message} using its lock token. This deletes the message from the
 * service.
 *
 * @param lockToken Lock token of the message.
 * @param sessionId Session id of the message to complete. {@code null} if there is no session.
 * @param transactionContext The transaction this operation takes part in; create it first via
 *     {@code createTransaction()} on this client or the sender client.
 *
 * @return A {@link Mono} that finishes when the message is completed on Service Bus.
 * @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
 *     {@code transactionContext.transactionId} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 */
public Mono<Void> complete(String lockToken, String sessionId,
    ServiceBusTransactionContext transactionContext) {
    if (transactionContext == null) {
        return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
    }
    if (transactionContext.getTransactionId() == null) {
        return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
    }

    return updateDisposition(lockToken, DispositionStatus.COMPLETED, null, null, null, sessionId,
        transactionContext);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This moves the message into the
 * deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 *
 * @return A {@link Mono} that completes when the Service Bus defer operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(String lockToken) {
    // Delegate using the session id configured on this receiver (null when sessionless).
    final String configuredSessionId = receiverOptions.getSessionId();
    return defer(lockToken, configuredSessionId);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token. This moves the message into the
 * deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param sessionId Session id of the message to defer. {@code null} if there is no session.
 *
 * @return A {@link Mono} that completes when the defer operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(String lockToken, String sessionId) {
    // No properties to modify on this overload.
    final Map<String, Object> noProperties = null;
    return defer(lockToken, noProperties, sessionId);
}
/**
 * Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message properties. This
 * moves the message into the deferred subqueue.
 *
 * @param lockToken Lock token of the message.
 * @param propertiesToModify Message properties to modify.
 *
 * @return A {@link Mono} that completes when the defer operation finishes.
 * @throws NullPointerException if {@code lockToken} is null.
 * @throws UnsupportedOperationException if the receiver was opened in
 *     {@link ReceiveMode#RECEIVE_AND_DELETE RECEIVE_AND_DELETE} mode.
 * @throws IllegalArgumentException if {@code lockToken} is an empty value.
 * @see <a href="https://docs.microsoft.com/azure/service-bus-messaging/message-deferral">Message deferral</a>
 */
public Mono<Void> defer(String lockToken, Map<String, Object> propertiesToModify) {
    // Delegate using the session id configured on this receiver (null when sessionless).
    final String configuredSessionId = receiverOptions.getSessionId();
    return defer(lockToken, propertiesToModify, configuredSessionId);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the defer operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken, Map<String, Object> propertiesToModify,
ServiceBusTransactionContext transactionContext) {
return defer(lockToken, propertiesToModify, receiverOptions.getSessionId(), transactionContext);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
* @param sessionId Session id of the message to defer. {@code null} if there is no session.
*
* @return A {@link Mono} that completes when the Service Bus defer operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken, Map<String, Object> propertiesToModify, String sessionId) {
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null,
propertiesToModify, sessionId, null);
}
/**
* Defers a {@link ServiceBusReceivedMessage message} using its lock token with modified message property. This will
* move message into the deferred subqueue.
*
* @param lockToken Lock token of the message.
* @param propertiesToModify Message properties to modify.
* @param sessionId Session id of the message to defer. {@code null} if there is no session.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the Service Bus defer operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
* @see <a href="https:
*/
public Mono<Void> defer(String lockToken, Map<String, Object> propertiesToModify, String sessionId,
ServiceBusTransactionContext transactionContext) {
if (Objects.isNull(transactionContext)) {
return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
} else if (Objects.isNull(transactionContext.getTransactionId())) {
return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.DEFERRED, null, null,
propertiesToModify, sessionId, transactionContext);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(String lockToken) {
return deadLetter(lockToken, receiverOptions.getSessionId());
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
* @param sessionId Session id of the message to deadletter. {@code null} if there is no session.
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(String lockToken, String sessionId) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS, sessionId);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue.
*
* @param lockToken Lock token of the message.
* @param sessionId Session id of the message to deadletter. {@code null} if there is no session.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
* @see <a href="https:
* queues</a>
*/
public Mono<Void> deadLetter(String lockToken, String sessionId,
ServiceBusTransactionContext transactionContext) {
return deadLetter(lockToken, DEFAULT_DEAD_LETTER_OPTIONS, sessionId, transactionContext);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} or {@code deadLetterOptions} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
*/
public Mono<Void> deadLetter(String lockToken, DeadLetterOptions deadLetterOptions) {
return deadLetter(lockToken, deadLetterOptions, receiverOptions.getSessionId());
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken}, {@code deadLetterOptions}, {@code transactionContext} or
* {@code transactionContext.transactionId} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
*/
public Mono<Void> deadLetter(String lockToken, DeadLetterOptions deadLetterOptions,
ServiceBusTransactionContext transactionContext) {
return deadLetter(lockToken, deadLetterOptions, receiverOptions.getSessionId(), transactionContext);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
* @param sessionId Session id of the message to deadletter. {@code null} if there is no session.
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
*/
public Mono<Void> deadLetter(String lockToken, DeadLetterOptions deadLetterOptions, String sessionId) {
if (Objects.isNull(deadLetterOptions)) {
return monoError(logger, new NullPointerException("'deadLetterOptions' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify(), sessionId,
null);
}
/**
* Moves a {@link ServiceBusReceivedMessage message} to the deadletter subqueue with deadletter reason, error
* description, and/or modified properties.
*
* @param lockToken Lock token of the message.
* @param deadLetterOptions The options to specify when moving message to the deadletter sub-queue.
* @param sessionId Session id of the message to deadletter. {@code null} if there is no session.
* @param transactionContext in which this operation is taking part in. The transaction should be created first by
* {@link ServiceBusReceiverAsyncClient
* {@link ServiceBusSenderAsyncClient
*
* @return A {@link Mono} that completes when the dead letter operation finishes.
* @throws NullPointerException if {@code lockToken} is null.
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode
* mode.
* @throws IllegalArgumentException if {@code lockToken} is an empty value.
*/
public Mono<Void> deadLetter(String lockToken, DeadLetterOptions deadLetterOptions, String sessionId,
ServiceBusTransactionContext transactionContext) {
if (Objects.isNull(transactionContext)) {
return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
} else if (Objects.isNull(transactionContext.getTransactionId())) {
return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
}
return updateDisposition(lockToken, DispositionStatus.SUSPENDED, deadLetterOptions.getDeadLetterReason(),
deadLetterOptions.getDeadLetterErrorDescription(), deadLetterOptions.getPropertiesToModify(), sessionId,
transactionContext);
}
/**
* Gets the state of a session given its identifier.
*
* @param sessionId Identifier of session to get.
*
* @return The session state or an empty Mono if there is no state set for the session.
* @throws IllegalStateException if the receiver is a non-session receiver.
*/
public Mono<byte[]> getSessionState(String sessionId) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "getSessionState")));
} else if (!receiverOptions.isSessionReceiver()) {
return monoError(logger, new IllegalStateException("Cannot get session state on a non-session receiver."));
}
if (unnamedSessionManager != null) {
return unnamedSessionManager.getSessionState(sessionId);
} else {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(channel -> channel.getSessionState(sessionId, getLinkName(sessionId)));
}
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peek() {
return peek(receiverOptions.getSessionId());
}
/**
* Reads the next active message without changing the state of the receiver or the message source. The first call to
* {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent
* message in the entity.
*
* @param sessionId Session id of the message to peek from. {@code null} if there is no session.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peek(String sessionId) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(channel -> {
final long sequence = lastPeekedSequenceNumber.get() + 1;
logger.verbose("Peek message from sequence number: {}", sequence);
return channel.peek(sequence, sessionId, getLinkName(sessionId));
})
.handle((message, sink) -> {
final long current = lastPeekedSequenceNumber
.updateAndGet(value -> Math.max(value, message.getSequenceNumber()));
logger.verbose("Updating last peeked sequence number: {}", current);
sink.next(message);
});
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber) {
return peekAt(sequenceNumber, receiverOptions.getSessionId());
}
/**
* Starting from the given sequence number, reads next the active message without changing the state of the receiver
* or the message source.
*
* @param sequenceNumber The sequence number from where to read the message.
* @param sessionId Session id of the message to peek from. {@code null} if there is no session.
*
* @return A peeked {@link ServiceBusReceivedMessage}.
* @see <a href="https:
*/
public Mono<ServiceBusReceivedMessage> peekAt(long sequenceNumber, String sessionId) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.peek(sequenceNumber, sessionId, getLinkName(sessionId)));
}
/**
* Reads the next batch of active messages without changing the state of the receiver or the message source.
*
* @param maxMessages The number of messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages) {
return peekBatch(maxMessages, receiverOptions.getSessionId());
}
    /**
     * Reads the next batch of active messages without changing the state of the receiver or the message source.
     * Peeking starts one past the highest sequence number this receiver has already peeked, and that high-water
     * mark is advanced to the last message in the returned batch.
     *
     * @param maxMessages The maximum number of messages to peek.
     * @param sessionId Session id of the messages to peek from. {@code null} if there is no session.
     *
     * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked.
     * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
     */
    public Flux<ServiceBusReceivedMessage> peekBatch(int maxMessages, String sessionId) {
        if (isDisposed.get()) {
            return fluxError(logger, new IllegalStateException(
                String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch")));
        }
        return connectionProcessor
            .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMapMany(node -> {
                final long nextSequenceNumber = lastPeekedSequenceNumber.get() + 1;
                logger.verbose("Peek batch from sequence number: {}", nextSequenceNumber);
                final Flux<ServiceBusReceivedMessage> messages =
                    node.peek(nextSequenceNumber, sessionId, getLinkName(sessionId), maxMessages);
                // Side channel that only advances lastPeekedSequenceNumber: take the last message of the batch
                // (or a sentinel carrying the current mark when the batch is empty) and fold its sequence number
                // into the high-water mark. sink.complete() ensures the sentinel/last message is NOT re-emitted;
                // the merged flux below emits only the `messages` stream to subscribers.
                final Mono<ServiceBusReceivedMessage> handle = messages
                    .switchIfEmpty(Mono.fromCallable(() -> {
                        // Empty batch: sentinel with the unchanged mark so updateAndGet below is a no-op.
                        ServiceBusReceivedMessage emptyMessage = new ServiceBusReceivedMessage(new byte[0]);
                        emptyMessage.setSequenceNumber(lastPeekedSequenceNumber.get());
                        return emptyMessage;
                    }))
                    .last()
                    .handle((last, sink) -> {
                        // Math.max keeps the mark monotonic even under concurrent peeks.
                        final long current = lastPeekedSequenceNumber
                            .updateAndGet(value -> Math.max(value, last.getSequenceNumber()));
                        logger.verbose("Last peeked sequence number in batch: {}", current);
                        sink.complete();
                    });
                return Flux.merge(messages, handle);
            });
    }
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
*
* @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber) {
return peekBatchAt(maxMessages, sequenceNumber, receiverOptions.getSessionId());
}
/**
* Starting from the given sequence number, reads the next batch of active messages without changing the state of
* the receiver or the message source.
*
* @param maxMessages The number of messages.
* @param sequenceNumber The sequence number from where to start reading messages.
* @param sessionId Session id of the messages to peek from. {@code null} if there is no session.
*
* @return An {@link IterableStream} of {@link ServiceBusReceivedMessage} peeked.
* @throws IllegalArgumentException if {@code maxMessages} is not a positive integer.
* @see <a href="https:
*/
public Flux<ServiceBusReceivedMessage> peekBatchAt(int maxMessages, long sequenceNumber, String sessionId) {
if (isDisposed.get()) {
return fluxError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt")));
}
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMapMany(node -> node.peek(sequenceNumber, sessionId, getLinkName(sessionId), maxMessages));
}
/**
* Receives an <b>infinite</b> stream of {@link ServiceBusReceivedMessage messages} from the Service Bus
* entity. This Flux continuously receives messages from a Service Bus entity until either:
*
* <ul>
* <li>The receiver is closed.</li>
* <li>The subscription to the Flux is disposed.</li>
* <li>A terminal signal from a downstream subscriber is propagated upstream (ie. {@link Flux
* {@link Flux
* <li>An {@link AmqpException} occurs that causes the receive link to stop.</li>
* </ul>
*
* @return An <b>infinite</b> stream of messages from the Service Bus entity.
*/
public Flux<ServiceBusReceivedMessageContext> receive() {
if (unnamedSessionManager != null) {
return unnamedSessionManager.receive();
} else {
return getOrCreateConsumer().receive().map(ServiceBusReceivedMessageContext::new);
}
}
/**
* Receives a bounded stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity. This stream
* receives either {@code maxNumberOfMessages} are received or the {@code maxWaitTime} has elapsed.
*
* @param maxNumberOfMessages Maximum number of messages to receive.
* @param maxWaitTime Maximum time to wait.
*
* @return A bounded {@link Flux} of messages.
* @throws NullPointerException if {@code maxWaitTime} is null.
* @throws IllegalArgumentException if {@code maxNumberOfMessages} is less than 1. {@code maxWaitTime} is zero
* or a negative duration.
*/
public Flux<ServiceBusReceivedMessageContext> receive(int maxNumberOfMessages, Duration maxWaitTime) {
if (maxNumberOfMessages < 1) {
return fluxError(logger, new IllegalArgumentException("'maxNumberOfMessages' cannot be less than 1."));
} else if (maxWaitTime == null) {
return fluxError(logger, new NullPointerException("'maxWaitTime' cannot be null."));
} else if (maxWaitTime.isNegative() || maxWaitTime.isZero()) {
return fluxError(logger, new NullPointerException("'maxWaitTime' cannot be negative or zero."));
}
return receive().take(maxNumberOfMessages).take(maxWaitTime);
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) {
return receiveDeferredMessage(sequenceNumber, receiverOptions.getSessionId());
}
/**
* Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using
* sequence number.
*
* @param sequenceNumber The {@link ServiceBusReceivedMessage
* message.
* @param sessionId Session id of the deferred message. {@code null} if there is no session.
*
* @return A deferred message with the matching {@code sequenceNumber}.
*/
public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber, String sessionId) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.receiveDeferredMessages(receiverOptions.getReceiveMode(),
sessionId, getLinkName(sessionId), Collections.singleton(sequenceNumber)).last())
.map(receivedMessage -> {
if (CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
return receivedMessage;
}
if (receiverOptions.getReceiveMode() == ReceiveMode.PEEK_LOCK) {
receivedMessage.setLockedUntil(managementNodeLocks.addOrUpdate(receivedMessage.getLockToken(),
receivedMessage.getLockedUntil()));
}
return receivedMessage;
});
}
/**
* Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
* by using sequence number.
*
* @param sequenceNumbers The sequence numbers of the deferred messages.
*
* @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
*/
public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(Iterable<Long> sequenceNumbers) {
return receiveDeferredMessageBatch(sequenceNumbers, receiverOptions.getSessionId());
}
    /**
     * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be
     * received by using their sequence numbers.
     *
     * @param sequenceNumbers Sequence numbers of the deferred messages.
     * @param sessionId Session id of the deferred messages. {@code null} if there is no session.
     *
     * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}.
     * @throws IllegalStateException if the receiver is disposed.
     */
    public Flux<ServiceBusReceivedMessage> receiveDeferredMessageBatch(Iterable<Long> sequenceNumbers,
        String sessionId) {
        if (isDisposed.get()) {
            return fluxError(logger, new IllegalStateException(
                String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
        }
        return connectionProcessor
            .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMapMany(node -> node.receiveDeferredMessages(receiverOptions.getReceiveMode(),
                sessionId, getLinkName(sessionId), sequenceNumbers))
            .map(receivedMessage -> {
                if (CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
                    return receivedMessage;
                }
                // In PEEK_LOCK mode, remember the lock token so later settle operations on it are routed to
                // the management node (see isManagementToken/updateDisposition).
                if (receiverOptions.getReceiveMode() == ReceiveMode.PEEK_LOCK) {
                    receivedMessage.setLockedUntil(managementNodeLocks.addOrUpdate(receivedMessage.getLockToken(),
                        receivedMessage.getLockedUntil()));
                }
                return receivedMessage;
            });
    }
    /**
     * Asynchronously renews the lock on the specified message. The lock will be renewed based on the setting
     * specified on the entity. When a message is received in PEEK_LOCK mode, the message is locked on the
     * server for this receiver instance for a duration as specified during the Queue creation (LockDuration).
     * If processing of the message requires longer than this duration, the lock needs to be renewed. For each
     * renewal, the lock is reset to the entity's LockDuration value.
     *
     * <p>NOTE(review): this Javadoc has no method following it — the renew-message-lock implementation it
     * documents appears to be missing or moved elsewhere; confirm and either restore the method or remove this
     * orphaned comment.</p>
     *
     * @param lockToken Lock token of the message to renew.
     *
     * @return The new expiration time for the message.
     * @throws NullPointerException if {@code lockToken} is null.
     * @throws UnsupportedOperationException if the receiver was opened in RECEIVE_AND_DELETE mode.
     * @throws IllegalStateException if the receiver is a session receiver.
     * @throws IllegalArgumentException if {@code lockToken} is an empty value.
     */
    /**
     * Renews the lock on the session with the given identifier.
     *
     * @param sessionId Identifier of the session whose lock to renew.
     *
     * @return The next expiration time for the session lock.
     * @throws IllegalStateException if the receiver is disposed or is a non-session receiver.
     */
    public Mono<Instant> renewSessionLock(String sessionId) {
        if (isDisposed.get()) {
            return monoError(logger, new IllegalStateException(
                String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewSessionLock")));
        } else if (!receiverOptions.isSessionReceiver()) {
            return monoError(logger, new IllegalStateException("Cannot renew session lock on a non-session receiver."));
        }
        // Unnamed-session receivers supply the link that owns the session; named sessions pass null.
        final String linkName = unnamedSessionManager != null
            ? unnamedSessionManager.getLinkName(sessionId)
            : null;
        return connectionProcessor
            .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
            .flatMap(channel -> channel.renewSessionLock(sessionId, linkName));
    }
/**
* Sets the state of a session given its identifier.
*
* @param sessionId Identifier of session to get.
* @param sessionState State to set on the session.
*
* @return A Mono that completes when the session is set
* @throws IllegalStateException if the receiver is a non-session receiver.
*/
public Mono<Void> setSessionState(String sessionId, byte[] sessionState) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "setSessionState")));
} else if (!receiverOptions.isSessionReceiver()) {
return monoError(logger, new IllegalStateException("Cannot set session state on a non-session receiver."));
}
final String linkName = unnamedSessionManager != null
? unnamedSessionManager.getLinkName(sessionId)
: null;
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(channel -> channel.setSessionState(sessionId, sessionState, linkName));
}
/**
* Starts a new service side transaction. The {@link ServiceBusTransactionContext} should be passed to all
* operations that needs to be in this transaction.
*
* <p><strong>Create a transaction</strong></p>
* {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.createTransaction}
*
* @return The {@link Mono} that finishes this operation on service bus resource.
*/
public Mono<ServiceBusTransactionContext> createTransaction() {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "createTransaction")));
}
return connectionProcessor
.flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME))
.flatMap(transactionSession -> transactionSession.createTransaction())
.map(transaction -> new ServiceBusTransactionContext(transaction.getTransactionId()));
}
/**
* Commits the transaction given {@link ServiceBusTransactionContext}. This will make a call to Service Bus.
* <p><strong>Commit a transaction</strong></p>
* {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.commitTransaction}
*
* @param transactionContext to be committed.
* @throws NullPointerException if {@code transactionContext} or {@code transactionContext.transactionId} is null.
*
* @return The {@link Mono} that finishes this operation on service bus resource.
*/
public Mono<Void> commitTransaction(ServiceBusTransactionContext transactionContext) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "commitTransaction")));
}
if (Objects.isNull(transactionContext)) {
return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
} else if (Objects.isNull(transactionContext.getTransactionId())) {
return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
}
return connectionProcessor
.flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME))
.flatMap(transactionSession -> transactionSession.commitTransaction(new AmqpTransaction(
transactionContext.getTransactionId())));
}
/**
* Rollbacks the transaction given {@link ServiceBusTransactionContext}. This will make a call to Service Bus.
* <p><strong>Rollback a transaction</strong></p>
* {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.rollbackTransaction}
*
* @param transactionContext to be rollbacked.
* @throws NullPointerException if {@code transactionContext} or {@code transactionContext.transactionId} is null.
*
* @return The {@link Mono} that finishes this operation on service bus resource.
*/
public Mono<Void> rollbackTransaction(ServiceBusTransactionContext transactionContext) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "rollbackTransaction")));
}
if (Objects.isNull(transactionContext)) {
return monoError(logger, new NullPointerException("'transactionContext' cannot be null."));
} else if (Objects.isNull(transactionContext.getTransactionId())) {
return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null."));
}
return connectionProcessor
.flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME))
.flatMap(transactionSession -> transactionSession.rollbackTransaction(new AmqpTransaction(
transactionContext.getTransactionId())));
}
/**
* Disposes of the consumer by closing the underlying connection to the service.
*/
@Override
public void close() {
if (isDisposed.getAndSet(true)) {
return;
}
logger.info("Removing receiver links.");
final ServiceBusAsyncConsumer disposed = consumer.getAndSet(null);
if (disposed != null) {
disposed.close();
}
if (unnamedSessionManager != null) {
unnamedSessionManager.close();
}
onClientClose.run();
}
/**
* Gets whether or not the management node contains the message lock token and it has not expired. Lock tokens are
* held by the management node when they are received from the management node or management operations are
* performed using that {@code lockToken}.
*
* @param lockToken Lock token to check for.
*
* @return {@code true} if the management node contains the lock token and false otherwise.
*/
private boolean isManagementToken(String lockToken) {
return managementNodeLocks.contains(lockToken);
}
private Mono<Void> updateDisposition(String lockToken, DispositionStatus dispositionStatus,
String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify,
String sessionId, ServiceBusTransactionContext transactionContext) {
if (isDisposed.get()) {
return monoError(logger, new IllegalStateException(
String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue())));
} else if (Objects.isNull(lockToken)) {
return monoError(logger, new NullPointerException("'lockToken' cannot be null."));
} else if (lockToken.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'lockToken' cannot be empty."));
}
if (receiverOptions.getReceiveMode() != ReceiveMode.PEEK_LOCK) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus))));
}
final String sessionIdToUse;
if (sessionId == null && !CoreUtils.isNullOrEmpty(receiverOptions.getSessionId())) {
sessionIdToUse = receiverOptions.getSessionId();
} else {
sessionIdToUse = sessionId;
}
logger.info("{}: Update started. Disposition: {}. Lock: {}. SessionId {}.", entityPath, dispositionStatus,
lockToken, sessionIdToUse);
final Mono<Void> performOnManagement = connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(node -> node.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
deadLetterErrorDescription, propertiesToModify, sessionId, getLinkName(sessionId), transactionContext))
.then(Mono.fromRunnable(() -> {
logger.info("{}: Management node Update completed. Disposition: {}. Lock: {}.",
entityPath, dispositionStatus, lockToken);
managementNodeLocks.remove(lockToken);
}));
if (unnamedSessionManager != null) {
return unnamedSessionManager.updateDisposition(lockToken, sessionId, dispositionStatus, propertiesToModify,
deadLetterReason, deadLetterErrorDescription, transactionContext)
.flatMap(isSuccess -> {
if (isSuccess) {
return Mono.empty();
}
logger.info("Could not perform on session manger. Performing on management node.");
return performOnManagement;
});
}
final ServiceBusAsyncConsumer existingConsumer = consumer.get();
if (isManagementToken(lockToken) || existingConsumer == null) {
return performOnManagement;
} else {
return existingConsumer.updateDisposition(lockToken, dispositionStatus, deadLetterReason,
deadLetterErrorDescription, propertiesToModify, transactionContext)
.then(Mono.fromRunnable(() -> logger.info("{}: Update completed. Disposition: {}. Lock: {}.",
entityPath, dispositionStatus, lockToken)));
}
}
private ServiceBusAsyncConsumer getOrCreateConsumer() {
final ServiceBusAsyncConsumer existing = consumer.get();
if (existing != null) {
return existing;
}
final String linkName = StringUtil.getRandomString(entityPath);
logger.info("{}: Creating consumer for link '{}'", entityPath, linkName);
final Flux<ServiceBusReceiveLink> receiveLink = connectionProcessor.flatMap(connection -> {
if (receiverOptions.isSessionReceiver()) {
return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(),
null, entityType, receiverOptions.getSessionId());
} else {
return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(),
null, entityType);
}
})
.doOnNext(next -> {
final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]"
+ " sessionEnabled? {} transferEntityPath: [{}], entityType: [{}]";
logger.verbose(format, next.getEntityPath(), receiverOptions.getReceiveMode(),
CoreUtils.isNullOrEmpty(receiverOptions.getSessionId()), "N/A", entityType);
})
.repeat();
final LinkErrorContext context = new LinkErrorContext(fullyQualifiedNamespace, entityPath, linkName,
null);
final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions());
final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith(
new ServiceBusReceiveLinkProcessor(receiverOptions.getPrefetchCount(), retryPolicy, connectionProcessor,
context));
final ServiceBusAsyncConsumer newConsumer = new ServiceBusAsyncConsumer(linkName, linkMessageProcessor,
messageSerializer, false, receiverOptions.autoLockRenewalEnabled(),
receiverOptions.getMaxAutoLockRenewalDuration(), connectionProcessor.getRetryOptions(),
(token, associatedLinkName) -> renewMessageLock(token, associatedLinkName));
if (consumer.compareAndSet(null, newConsumer)) {
return newConsumer;
} else {
newConsumer.close();
return consumer.get();
}
}
/**
* @return receiver options set by user;
*/
ReceiverOptions getReceiverOptions() {
return receiverOptions;
}
/**
* Renews the message lock, and updates its value in the container.
*/
private Mono<Instant> renewMessageLock(String lockToken, String linkName) {
return connectionProcessor
.flatMap(connection -> connection.getManagementNode(entityPath, entityType))
.flatMap(serviceBusManagementNode ->
serviceBusManagementNode.renewMessageLock(lockToken, linkName));
}
/**
* If the receiver has not connected via {@link
* the management node.
*
* @return The name of the receive link, or null of it has not connected via a receive link.
*/
private String getLinkName(String sessionId) {
if (unnamedSessionManager != null && !CoreUtils.isNullOrEmpty(sessionId)) {
return unnamedSessionManager.getLinkName(sessionId);
} else if (!CoreUtils.isNullOrEmpty(sessionId) && !receiverOptions.isSessionReceiver()) {
return null;
} else {
final ServiceBusAsyncConsumer existing = consumer.get();
return existing != null ? existing.getLinkName() : null;
}
}
} |
This isn't correct? It should be one or the other. | public static void main(String[] args) {
final AtomicBoolean isRunning = new AtomicBoolean(true);
Mono.delay(Duration.ofMinutes(2)).subscribe(index -> {
System.out.println("2 minutes has elapsed, stopping receive loop.");
isRunning.set(false);
});
String connectionString = "Endpoint={fully-qualified-namespace};SharedAccessKeyName={policy-name};"
+ "SharedAccessKey={key}";
ServiceBusReceiverClient receiver = new ServiceBusClientBuilder()
.connectionString(connectionString)
.sessionReceiver()
.sessionId("greetings")
.queueName("<<queue-name>>")
.buildClient();
while (isRunning.get()) {
IterableStream<ServiceBusReceivedMessageContext> messages = receiver.receive(10, Duration.ofSeconds(30));
for (ServiceBusReceivedMessageContext context : messages) {
System.out.println("Processing message from session: " + context.getSessionId());
ServiceBusReceivedMessage message = context.getMessage();
boolean isSuccessfullyProcessed = processMessage(message);
if (isSuccessfullyProcessed) {
receiver.complete(message.getLockToken());
receiver.complete(message.getLockToken(), message.getSessionId());
} else {
receiver.abandon(message.getLockToken(), null, message.getSessionId());
}
}
}
receiver.close();
} | receiver.complete(message.getLockToken()); | public static void main(String[] args) {
final AtomicBoolean isRunning = new AtomicBoolean(true);
Mono.delay(Duration.ofMinutes(2)).subscribe(index -> {
System.out.println("2 minutes has elapsed, stopping receive loop.");
isRunning.set(false);
});
String connectionString = "Endpoint={fully-qualified-namespace};SharedAccessKeyName={policy-name};"
+ "SharedAccessKey={key}";
ServiceBusReceiverClient receiver = new ServiceBusClientBuilder()
.connectionString(connectionString)
.sessionReceiver()
.sessionId("greetings")
.queueName("<<queue-name>>")
.buildClient();
while (isRunning.get()) {
IterableStream<ServiceBusReceivedMessageContext> messages = receiver.receive(10, Duration.ofSeconds(30));
for (ServiceBusReceivedMessageContext context : messages) {
System.out.println("Processing message from session: " + context.getSessionId());
ServiceBusReceivedMessage message = context.getMessage();
boolean isSuccessfullyProcessed = processMessage(message);
if (isSuccessfullyProcessed) {
receiver.complete(message.getLockToken(), message.getSessionId());
} else {
receiver.abandon(message.getLockToken(), null, message.getSessionId());
}
}
}
receiver.close();
} | class ReceiveNamedSessionSample {
/**
* Main method to invoke this demo on how to receive messages from a session with id "greetings" in an Azure Service
* Bus Queue.
*
* @param args Unused arguments to the program.
*/
private static boolean processMessage(ServiceBusReceivedMessage message) {
System.out.println("Processing message: " + message.getMessageId());
return true;
}
} | class ReceiveNamedSessionSample {
/**
* Main method to invoke this demo on how to receive messages from a session with id "greetings" in an Azure Service
* Bus Queue.
*
* @param args Unused arguments to the program.
*/
private static boolean processMessage(ServiceBusReceivedMessage message) {
System.out.println("Processing message: " + message.getMessageId());
return true;
}
} |
Should this throw `UnsupportedOperationException` if it isn't supported? | public boolean isSymbolicLink() {
return false;
} | return false; | public boolean isSymbolicLink() {
return false;
} | class AzureBasicFileAttributes implements BasicFileAttributes {
private final ClientLogger logger = new ClientLogger(AzureBasicFileAttributes.class);
static final Set<String> ATTRIBUTE_STRINGS;
static {
Set<String> set = new HashSet<>();
set.add("lastModifiedTime");
set.add("isRegularFile");
set.add("isDirectory");
set.add("isSymbolicLink");
set.add("isOther");
set.add("size");
set.add("creationTime");
ATTRIBUTE_STRINGS = Collections.unmodifiableSet(set);
}
private final BlobProperties properties;
/*
There are some work-arounds we could do to try to accommodate virtual directories such as making a checkDirStatus
call before or after getProperties to throw an appropriate error or adding an isVirtualDirectory method. However,
the former wastes network time only to throw a slightly more specific error when we will throw on 404 anyway. The
latter introduces virtual directories into the actual code path/api surface. While we are clear in our docs about
the possible pitfalls of virtual directories, and customers should be aware of it, they shouldn't have to code
against it. Therefore, we fall back to documenting that reading attributes on a virtual directory will throw. And
who reads attributes on a directory anyway?
*/
AzureBasicFileAttributes(Path path) throws IOException {
try {
this.properties = new AzureResource(path).getBlobClient().getProperties();
} catch (BlobStorageException e) {
throw LoggingUtility.logError(logger, new IOException(e));
}
}
/**
* {@inheritDoc}
*/
@Override
public FileTime lastModifiedTime() {
return FileTime.from(properties.getLastModified().toInstant());
}
/**
* Unsupported.
* {@inheritDoc}
*/
@Override
public FileTime lastAccessTime() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public FileTime creationTime() {
return FileTime.from(properties.getCreationTime().toInstant());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isRegularFile() {
return !this.properties.getMetadata().getOrDefault(AzureResource.DIR_METADATA_MARKER, "false").equals("true");
}
/**
* {@inheritDoc}
* <p>
* Will only return true if the directory is a concrete directory. See
* {@link AzureFileSystemProvider
* concrete directories.
*/
@Override
public boolean isDirectory() {
return !this.isRegularFile();
}
/**
* @return false. Symbolic links are not supported.
*/
@Override
/**
* @return false
*/
@Override
public boolean isOther() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long size() {
return properties.getBlobSize();
}
/**
* Unsupported.
* {@inheritDoc}
*/
@Override
public Object fileKey() {
throw new UnsupportedOperationException();
}
} | class AzureBasicFileAttributes implements BasicFileAttributes {
private final ClientLogger logger = new ClientLogger(AzureBasicFileAttributes.class);
static final Set<String> ATTRIBUTE_STRINGS;
static {
Set<String> set = new HashSet<>();
set.add("lastModifiedTime");
set.add("isRegularFile");
set.add("isDirectory");
set.add("isSymbolicLink");
set.add("isOther");
set.add("size");
set.add("creationTime");
ATTRIBUTE_STRINGS = Collections.unmodifiableSet(set);
}
private final BlobProperties properties;
/*
There are some work-arounds we could do to try to accommodate virtual directories such as making a checkDirStatus
call before or after getProperties to throw an appropriate error or adding an isVirtualDirectory method. However,
the former wastes network time only to throw a slightly more specific error when we will throw on 404 anyway. The
latter introduces virtual directories into the actual code path/api surface. While we are clear in our docs about
the possible pitfalls of virtual directories, and customers should be aware of it, they shouldn't have to code
against it. Therefore, we fall back to documenting that reading attributes on a virtual directory will throw.
*/
AzureBasicFileAttributes(Path path) throws IOException {
try {
this.properties = new AzureResource(path).getBlobClient().getProperties();
} catch (BlobStorageException e) {
throw LoggingUtility.logError(logger, new IOException(e));
}
}
/**
* {@inheritDoc}
*/
@Override
public FileTime lastModifiedTime() {
return FileTime.from(properties.getLastModified().toInstant());
}
/**
* Unsupported.
* @throws UnsupportedOperationException Operation not supported.
* {@inheritDoc}
*/
@Override
public FileTime lastAccessTime() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public FileTime creationTime() {
return FileTime.from(properties.getCreationTime().toInstant());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isRegularFile() {
return !this.properties.getMetadata().getOrDefault(AzureResource.DIR_METADATA_MARKER, "false").equals("true");
}
/**
* {@inheritDoc}
* <p>
* Will only return true if the directory is a concrete directory. See
* {@link AzureFileSystemProvider
* concrete directories.
*/
@Override
public boolean isDirectory() {
return !this.isRegularFile();
}
/**
* @return false. Symbolic links are not supported.
*/
@Override
/**
* @return false
*/
@Override
public boolean isOther() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long size() {
return properties.getBlobSize();
}
/**
* Unsupported.
* @throws UnsupportedOperationException Operation not supported.
* {@inheritDoc}
*/
@Override
public Object fileKey() {
throw new UnsupportedOperationException();
}
} |
I'm not sure. In some sense it's just always false, right? But maybe it makes it more clear that sym links aren't supported if I throw? | public boolean isSymbolicLink() {
return false;
} | return false; | public boolean isSymbolicLink() {
return false;
} | class AzureBasicFileAttributes implements BasicFileAttributes {
private final ClientLogger logger = new ClientLogger(AzureBasicFileAttributes.class);
static final Set<String> ATTRIBUTE_STRINGS;
static {
Set<String> set = new HashSet<>();
set.add("lastModifiedTime");
set.add("isRegularFile");
set.add("isDirectory");
set.add("isSymbolicLink");
set.add("isOther");
set.add("size");
set.add("creationTime");
ATTRIBUTE_STRINGS = Collections.unmodifiableSet(set);
}
private final BlobProperties properties;
/*
There are some work-arounds we could do to try to accommodate virtual directories such as making a checkDirStatus
call before or after getProperties to throw an appropriate error or adding an isVirtualDirectory method. However,
the former wastes network time only to throw a slightly more specific error when we will throw on 404 anyway. The
latter introduces virtual directories into the actual code path/api surface. While we are clear in our docs about
the possible pitfalls of virtual directories, and customers should be aware of it, they shouldn't have to code
against it. Therefore, we fall back to documenting that reading attributes on a virtual directory will throw. And
who reads attributes on a directory anyway?
*/
AzureBasicFileAttributes(Path path) throws IOException {
try {
this.properties = new AzureResource(path).getBlobClient().getProperties();
} catch (BlobStorageException e) {
throw LoggingUtility.logError(logger, new IOException(e));
}
}
/**
* {@inheritDoc}
*/
@Override
public FileTime lastModifiedTime() {
return FileTime.from(properties.getLastModified().toInstant());
}
/**
* Unsupported.
* {@inheritDoc}
*/
@Override
public FileTime lastAccessTime() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public FileTime creationTime() {
return FileTime.from(properties.getCreationTime().toInstant());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isRegularFile() {
return !this.properties.getMetadata().getOrDefault(AzureResource.DIR_METADATA_MARKER, "false").equals("true");
}
/**
* {@inheritDoc}
* <p>
* Will only return true if the directory is a concrete directory. See
* {@link AzureFileSystemProvider
* concrete directories.
*/
@Override
public boolean isDirectory() {
return !this.isRegularFile();
}
/**
* @return false. Symbolic links are not supported.
*/
@Override
/**
* @return false
*/
@Override
public boolean isOther() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long size() {
return properties.getBlobSize();
}
/**
* Unsupported.
* {@inheritDoc}
*/
@Override
public Object fileKey() {
throw new UnsupportedOperationException();
}
} | class AzureBasicFileAttributes implements BasicFileAttributes {
private final ClientLogger logger = new ClientLogger(AzureBasicFileAttributes.class);
static final Set<String> ATTRIBUTE_STRINGS;
static {
Set<String> set = new HashSet<>();
set.add("lastModifiedTime");
set.add("isRegularFile");
set.add("isDirectory");
set.add("isSymbolicLink");
set.add("isOther");
set.add("size");
set.add("creationTime");
ATTRIBUTE_STRINGS = Collections.unmodifiableSet(set);
}
private final BlobProperties properties;
/*
There are some work-arounds we could do to try to accommodate virtual directories such as making a checkDirStatus
call before or after getProperties to throw an appropriate error or adding an isVirtualDirectory method. However,
the former wastes network time only to throw a slightly more specific error when we will throw on 404 anyway. The
latter introduces virtual directories into the actual code path/api surface. While we are clear in our docs about
the possible pitfalls of virtual directories, and customers should be aware of it, they shouldn't have to code
against it. Therefore, we fall back to documenting that reading attributes on a virtual directory will throw.
*/
AzureBasicFileAttributes(Path path) throws IOException {
try {
this.properties = new AzureResource(path).getBlobClient().getProperties();
} catch (BlobStorageException e) {
throw LoggingUtility.logError(logger, new IOException(e));
}
}
/**
* {@inheritDoc}
*/
@Override
public FileTime lastModifiedTime() {
return FileTime.from(properties.getLastModified().toInstant());
}
/**
* Unsupported.
* @throws UnsupportedOperationException Operation not supported.
* {@inheritDoc}
*/
@Override
public FileTime lastAccessTime() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public FileTime creationTime() {
return FileTime.from(properties.getCreationTime().toInstant());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isRegularFile() {
return !this.properties.getMetadata().getOrDefault(AzureResource.DIR_METADATA_MARKER, "false").equals("true");
}
/**
* {@inheritDoc}
* <p>
* Will only return true if the directory is a concrete directory. See
* {@link AzureFileSystemProvider
* concrete directories.
*/
@Override
public boolean isDirectory() {
return !this.isRegularFile();
}
/**
* @return false. Symbolic links are not supported.
*/
@Override
/**
* @return false
*/
@Override
public boolean isOther() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long size() {
return properties.getBlobSize();
}
/**
* Unsupported.
* @throws UnsupportedOperationException Operation not supported.
* {@inheritDoc}
*/
@Override
public Object fileKey() {
throw new UnsupportedOperationException();
}
} |
I think returning always `false` is correct. Throwing `UnsupportedOperationException` would mean "I cannot check if file is symlink or not` in plain English. I believe our file system doesn't have symlinks so all files are real by definition, therefore we know how to check if they're symlinks or not. | public boolean isSymbolicLink() {
return false;
} | return false; | public boolean isSymbolicLink() {
return false;
} | class AzureBasicFileAttributes implements BasicFileAttributes {
private final ClientLogger logger = new ClientLogger(AzureBasicFileAttributes.class);
static final Set<String> ATTRIBUTE_STRINGS;
static {
Set<String> set = new HashSet<>();
set.add("lastModifiedTime");
set.add("isRegularFile");
set.add("isDirectory");
set.add("isSymbolicLink");
set.add("isOther");
set.add("size");
set.add("creationTime");
ATTRIBUTE_STRINGS = Collections.unmodifiableSet(set);
}
private final BlobProperties properties;
/*
There are some work-arounds we could do to try to accommodate virtual directories such as making a checkDirStatus
call before or after getProperties to throw an appropriate error or adding an isVirtualDirectory method. However,
the former wastes network time only to throw a slightly more specific error when we will throw on 404 anyway. The
latter introduces virtual directories into the actual code path/api surface. While we are clear in our docs about
the possible pitfalls of virtual directories, and customers should be aware of it, they shouldn't have to code
against it. Therefore, we fall back to documenting that reading attributes on a virtual directory will throw. And
who reads attributes on a directory anyway?
*/
AzureBasicFileAttributes(Path path) throws IOException {
try {
this.properties = new AzureResource(path).getBlobClient().getProperties();
} catch (BlobStorageException e) {
throw LoggingUtility.logError(logger, new IOException(e));
}
}
/**
* {@inheritDoc}
*/
@Override
public FileTime lastModifiedTime() {
return FileTime.from(properties.getLastModified().toInstant());
}
/**
* Unsupported.
* {@inheritDoc}
*/
@Override
public FileTime lastAccessTime() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public FileTime creationTime() {
return FileTime.from(properties.getCreationTime().toInstant());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isRegularFile() {
return !this.properties.getMetadata().getOrDefault(AzureResource.DIR_METADATA_MARKER, "false").equals("true");
}
/**
* {@inheritDoc}
* <p>
* Will only return true if the directory is a concrete directory. See
* {@link AzureFileSystemProvider
* concrete directories.
*/
@Override
public boolean isDirectory() {
return !this.isRegularFile();
}
/**
* @return false. Symbolic links are not supported.
*/
@Override
/**
* @return false
*/
@Override
public boolean isOther() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long size() {
return properties.getBlobSize();
}
/**
* Unsupported.
* {@inheritDoc}
*/
@Override
public Object fileKey() {
throw new UnsupportedOperationException();
}
} | class AzureBasicFileAttributes implements BasicFileAttributes {
private final ClientLogger logger = new ClientLogger(AzureBasicFileAttributes.class);
static final Set<String> ATTRIBUTE_STRINGS;
static {
Set<String> set = new HashSet<>();
set.add("lastModifiedTime");
set.add("isRegularFile");
set.add("isDirectory");
set.add("isSymbolicLink");
set.add("isOther");
set.add("size");
set.add("creationTime");
ATTRIBUTE_STRINGS = Collections.unmodifiableSet(set);
}
private final BlobProperties properties;
/*
There are some work-arounds we could do to try to accommodate virtual directories such as making a checkDirStatus
call before or after getProperties to throw an appropriate error or adding an isVirtualDirectory method. However,
the former wastes network time only to throw a slightly more specific error when we will throw on 404 anyway. The
latter introduces virtual directories into the actual code path/api surface. While we are clear in our docs about
the possible pitfalls of virtual directories, and customers should be aware of it, they shouldn't have to code
against it. Therefore, we fall back to documenting that reading attributes on a virtual directory will throw.
*/
AzureBasicFileAttributes(Path path) throws IOException {
try {
this.properties = new AzureResource(path).getBlobClient().getProperties();
} catch (BlobStorageException e) {
throw LoggingUtility.logError(logger, new IOException(e));
}
}
/**
* {@inheritDoc}
*/
@Override
public FileTime lastModifiedTime() {
return FileTime.from(properties.getLastModified().toInstant());
}
/**
* Unsupported.
* @throws UnsupportedOperationException Operation not supported.
* {@inheritDoc}
*/
@Override
public FileTime lastAccessTime() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public FileTime creationTime() {
return FileTime.from(properties.getCreationTime().toInstant());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isRegularFile() {
return !this.properties.getMetadata().getOrDefault(AzureResource.DIR_METADATA_MARKER, "false").equals("true");
}
/**
* {@inheritDoc}
* <p>
* Will only return true if the directory is a concrete directory. See
* {@link AzureFileSystemProvider
* concrete directories.
*/
@Override
public boolean isDirectory() {
return !this.isRegularFile();
}
/**
* @return false. Symbolic links are not supported.
*/
@Override
/**
* @return false
*/
@Override
public boolean isOther() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long size() {
return properties.getBlobSize();
}
/**
* Unsupported.
* @throws UnsupportedOperationException Operation not supported.
* {@inheritDoc}
*/
@Override
public Object fileKey() {
throw new UnsupportedOperationException();
}
} |
Might want to add @throws to javadocs for unsupported apis. | public FileTime lastAccessTime() {
throw new UnsupportedOperationException();
} | throw new UnsupportedOperationException(); | public FileTime lastAccessTime() {
throw new UnsupportedOperationException();
} | class AzureBasicFileAttributes implements BasicFileAttributes {
private final ClientLogger logger = new ClientLogger(AzureBasicFileAttributes.class);
static final Set<String> ATTRIBUTE_STRINGS;
static {
Set<String> set = new HashSet<>();
set.add("lastModifiedTime");
set.add("isRegularFile");
set.add("isDirectory");
set.add("isSymbolicLink");
set.add("isOther");
set.add("size");
set.add("creationTime");
ATTRIBUTE_STRINGS = Collections.unmodifiableSet(set);
}
private final BlobProperties properties;
/*
There are some work-arounds we could do to try to accommodate virtual directories such as making a checkDirStatus
call before or after getProperties to throw an appropriate error or adding an isVirtualDirectory method. However,
the former wastes network time only to throw a slightly more specific error when we will throw on 404 anyway. The
latter introduces virtual directories into the actual code path/api surface. While we are clear in our docs about
the possible pitfalls of virtual directories, and customers should be aware of it, they shouldn't have to code
against it. Therefore, we fall back to documenting that reading attributes on a virtual directory will throw. And
who reads attributes on a directory anyway?
*/
AzureBasicFileAttributes(Path path) throws IOException {
try {
this.properties = new AzureResource(path).getBlobClient().getProperties();
} catch (BlobStorageException e) {
throw LoggingUtility.logError(logger, new IOException(e));
}
}
/**
* {@inheritDoc}
*/
@Override
public FileTime lastModifiedTime() {
return FileTime.from(properties.getLastModified().toInstant());
}
/**
* Unsupported.
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public FileTime creationTime() {
return FileTime.from(properties.getCreationTime().toInstant());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isRegularFile() {
return !this.properties.getMetadata().getOrDefault(AzureResource.DIR_METADATA_MARKER, "false").equals("true");
}
/**
* {@inheritDoc}
* <p>
* Will only return true if the directory is a concrete directory. See
* {@link AzureFileSystemProvider
* concrete directories.
*/
@Override
public boolean isDirectory() {
return !this.isRegularFile();
}
/**
* @return false. Symbolic links are not supported.
*/
@Override
public boolean isSymbolicLink() {
return false;
}
/**
* @return false
*/
@Override
public boolean isOther() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long size() {
return properties.getBlobSize();
}
/**
* Unsupported.
* {@inheritDoc}
*/
@Override
public Object fileKey() {
throw new UnsupportedOperationException();
}
} | class AzureBasicFileAttributes implements BasicFileAttributes {
private final ClientLogger logger = new ClientLogger(AzureBasicFileAttributes.class);
static final Set<String> ATTRIBUTE_STRINGS;
static {
Set<String> set = new HashSet<>();
set.add("lastModifiedTime");
set.add("isRegularFile");
set.add("isDirectory");
set.add("isSymbolicLink");
set.add("isOther");
set.add("size");
set.add("creationTime");
ATTRIBUTE_STRINGS = Collections.unmodifiableSet(set);
}
private final BlobProperties properties;
/*
There are some work-arounds we could do to try to accommodate virtual directories such as making a checkDirStatus
call before or after getProperties to throw an appropriate error or adding an isVirtualDirectory method. However,
the former wastes network time only to throw a slightly more specific error when we will throw on 404 anyway. The
latter introduces virtual directories into the actual code path/api surface. While we are clear in our docs about
the possible pitfalls of virtual directories, and customers should be aware of it, they shouldn't have to code
against it. Therefore, we fall back to documenting that reading attributes on a virtual directory will throw.
*/
AzureBasicFileAttributes(Path path) throws IOException {
try {
this.properties = new AzureResource(path).getBlobClient().getProperties();
} catch (BlobStorageException e) {
throw LoggingUtility.logError(logger, new IOException(e));
}
}
/**
* {@inheritDoc}
*/
@Override
public FileTime lastModifiedTime() {
return FileTime.from(properties.getLastModified().toInstant());
}
/**
* Unsupported.
* @throws UnsupportedOperationException Operation not supported.
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public FileTime creationTime() {
return FileTime.from(properties.getCreationTime().toInstant());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isRegularFile() {
return !this.properties.getMetadata().getOrDefault(AzureResource.DIR_METADATA_MARKER, "false").equals("true");
}
/**
* {@inheritDoc}
* <p>
* Will only return true if the directory is a concrete directory. See
* {@link AzureFileSystemProvider
* concrete directories.
*/
@Override
public boolean isDirectory() {
return !this.isRegularFile();
}
/**
* @return false. Symbolic links are not supported.
*/
@Override
public boolean isSymbolicLink() {
return false;
}
/**
* @return false
*/
@Override
public boolean isOther() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long size() {
return properties.getBlobSize();
}
/**
 * Unsupported.
 * @throws UnsupportedOperationException Operation not supported.
 * {@inheritDoc}
 */
@Override
public Object fileKey() {
// No stable, unique file key is exposed for blobs in this implementation.
throw new UnsupportedOperationException();
}
} |
why are we defining new public property for something which is already available? I thought the plan was to reuse `TestConfiguration.HOST` and not redefine a constant for the same thing. 1) on the CI the value for `TestConfiguration.HOST` will be populated by the CI. 2) in local debugging that can be populated from a properties file from user home. This breaks both 1) and 2). | public void createAadTokenCredential() throws InterruptedException {
CosmosAsyncDatabase db = null;
CosmosAsyncClient cosmosAsyncClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.COSMOS_EMULATOR_HOST)
.key(TestConfigurations.COSMOS_EMULATOR_KEY)
.buildAsyncClient();
String containerName = UUID.randomUUID().toString();
try {
CosmosDatabaseResponse databaseResponse = cosmosAsyncClient.createDatabase(databaseId).block();
CosmosContainerResponse containerResponse = cosmosAsyncClient.getDatabase(databaseId).createContainer(containerName, PARTITION_KEY_PATH).block();
} finally {
if (cosmosAsyncClient != null) {
safeClose(cosmosAsyncClient);
}
}
Thread.sleep(TIMEOUT);
TokenCredential emulatorCredential = new AadSimpleEmulatorTokenCredential(TestConfigurations.COSMOS_EMULATOR_KEY);
CosmosAsyncClient cosmosAadClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.COSMOS_EMULATOR_HOST)
.credential(emulatorCredential)
.buildAsyncClient();
try {
db = cosmosAadClient.getDatabase(databaseId).read()
.map(dabaseResponse -> {
CosmosAsyncDatabase database = cosmosAadClient.getDatabase(dabaseResponse.getProperties().getId());
log.info("Found database {} with {}", database.getId(), dabaseResponse.getProperties().getETag());
return database;
}).block();
assert db != null;
CosmosAsyncContainer container = db.getContainer(containerName).read()
.map(cosmosContainerResponse -> {
CosmosAsyncContainer container1 = cosmosAadClient.getDatabase(databaseId).getContainer(cosmosContainerResponse.getProperties().getId());
log.info("Found container {} with {}", container1.getId(), cosmosContainerResponse.getProperties().getETag());
return container1;
}).block();
assert container != null;
String itemName = UUID.randomUUID().toString();
String partitionKeyValue = UUID.randomUUID().toString();
ItemSample itemSample = getDocumentDefinition(itemName, partitionKeyValue);
CosmosItemResponse<ItemSample> cosmosItemResponse = container.createItem(itemSample, new CosmosItemRequestOptions()).block();
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
InternalObjectNode item = container
.readItem(itemName, new PartitionKey(partitionKeyValue), options, InternalObjectNode.class)
.map(CosmosItemResponse::getItem)
.map(jsonNode -> {
log.info("Found item with content: " + jsonNode.toString());
return jsonNode;
}).block();
assert item != null;
CosmosQueryRequestOptions requestOptions = new CosmosQueryRequestOptions();
CosmosPagedFlux<JsonNode> queryPagedFlux = container
.queryItems("SELECT * FROM c", requestOptions, JsonNode.class);
List<JsonNode> feedResponse = queryPagedFlux.byPage()
.flatMap(jsonNodeFeedResponse -> {
return Flux.fromIterable(jsonNodeFeedResponse.getResults());
}).map(jsonNode -> {
log.info("Found item with content: " + jsonNode.toString());
return jsonNode;
})
.collectList()
.block();
container.deleteItem(item.getId(), new PartitionKey(partitionKeyValue));
} finally {
if (db != null) {
cosmosAsyncClient.getDatabase(databaseId).delete().block();
}
if (cosmosAadClient != null) {
safeClose(cosmosAadClient);
}
}
Thread.sleep(SHUTDOWN_TIMEOUT);
} | .endpoint(TestConfigurations.COSMOS_EMULATOR_HOST) | public void createAadTokenCredential() throws InterruptedException {
CosmosAsyncDatabase db = null;
CosmosAsyncClient cosmosAsyncClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.buildAsyncClient();
String containerName = UUID.randomUUID().toString();
try {
CosmosDatabaseResponse databaseResponse = cosmosAsyncClient.createDatabase(databaseId).block();
CosmosContainerResponse containerResponse = cosmosAsyncClient.getDatabase(databaseId).createContainer(containerName, PARTITION_KEY_PATH).block();
} finally {
if (cosmosAsyncClient != null) {
safeClose(cosmosAsyncClient);
}
}
Thread.sleep(TIMEOUT);
TokenCredential emulatorCredential = new AadSimpleEmulatorTokenCredential(TestConfigurations.MASTER_KEY);
CosmosAsyncClient cosmosAadClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.credential(emulatorCredential)
.buildAsyncClient();
try {
db = cosmosAadClient.getDatabase(databaseId).read()
.map(dabaseResponse -> {
CosmosAsyncDatabase database = cosmosAadClient.getDatabase(dabaseResponse.getProperties().getId());
log.info("Found database {} with {}", database.getId(), dabaseResponse.getProperties().getETag());
return database;
}).block();
assert db != null;
CosmosAsyncContainer container = db.getContainer(containerName).read()
.map(cosmosContainerResponse -> {
CosmosAsyncContainer container1 = cosmosAadClient.getDatabase(databaseId).getContainer(cosmosContainerResponse.getProperties().getId());
log.info("Found container {} with {}", container1.getId(), cosmosContainerResponse.getProperties().getETag());
return container1;
}).block();
assert container != null;
String itemName = UUID.randomUUID().toString();
String partitionKeyValue = UUID.randomUUID().toString();
ItemSample itemSample = getDocumentDefinition(itemName, partitionKeyValue);
CosmosItemResponse<ItemSample> cosmosItemResponse = container.createItem(itemSample, new CosmosItemRequestOptions()).block();
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
InternalObjectNode item = container
.readItem(itemName, new PartitionKey(partitionKeyValue), options, InternalObjectNode.class)
.map(CosmosItemResponse::getItem)
.map(jsonNode -> {
log.info("Found item with content: " + jsonNode.toString());
return jsonNode;
}).block();
assert item != null;
CosmosQueryRequestOptions requestOptions = new CosmosQueryRequestOptions();
CosmosPagedFlux<JsonNode> queryPagedFlux = container
.queryItems("SELECT * FROM c", requestOptions, JsonNode.class);
List<JsonNode> feedResponse = queryPagedFlux.byPage()
.flatMap(jsonNodeFeedResponse -> {
return Flux.fromIterable(jsonNodeFeedResponse.getResults());
}).map(jsonNode -> {
log.info("Found item with content: " + jsonNode.toString());
return jsonNode;
})
.collectList()
.block();
container.deleteItem(item.getId(), new PartitionKey(partitionKeyValue));
} finally {
if (db != null) {
cosmosAsyncClient.getDatabase(databaseId).delete().block();
}
if (cosmosAadClient != null) {
safeClose(cosmosAadClient);
}
}
Thread.sleep(SHUTDOWN_TIMEOUT);
} | class AadAuthorizationTests extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(AadAuthorizationTests.class);
// Shared Jackson mapper; used by ItemSample#toString below.
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
// Partition key path used when creating test containers.
private final static String PARTITION_KEY_PATH = "/mypk";
// Fresh database id generated per test-class instance.
private final String databaseId = CosmosDatabaseForTest.generateId();
// No construction-time setup; tests create their own clients and databases.
protected AadAuthorizationTests() {
}
@Test(groups = { "emulator" }, timeOut = 10 * TIMEOUT)
/**
 * Builds a test document with the given id and partition key value and a
 * fixed "sgmts" payload.
 */
private ItemSample getDocumentDefinition(String itemId, String partitionKeyValue) {
    ItemSample doc = new ItemSample();
    doc.id = itemId;
    doc.mypk = partitionKeyValue;
    doc.sgmts = "[[6519456, 1471916863], [2498434, 1455671440]]";
    return doc;
}
// TestNG lifecycle hooks. Intentionally empty: each test manages its own
// clients/databases, so no shared setup or teardown is required here.
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
}
/**
 * {@link TokenCredential} for the Cosmos emulator: fabricates a JWT-shaped token
 * (header.claims.signature) whose "signature" segment is simply the base64url-encoded
 * emulator key, reported as valid for two hours from issuance.
 */
class AadSimpleEmulatorTokenCredential implements TokenCredential {
// Base64url-encoded emulator key; used verbatim as the token's third segment.
private final String emulatorKeyEncoded;
// JWT header JSON identifying the emulator's primary master key.
private final String AAD_HEADER_COSMOS_EMULATOR = "{\"typ\":\"JWT\",\"alg\":\"RS256\",\"x5t\":\"CosmosEmulatorPrimaryMaster\",\"kid\":\"CosmosEmulatorPrimaryMaster\"}";
// Claim template formatted below with three epoch-second timestamps
// (presumably nbf/iat/exp — confirm against the full literal).
// NOTE(review): this string literal appears truncated in this extract
// (unterminated) — restore the full claim JSON from the original source.
private final String AAD_CLAIM_COSMOS_EMULATOR_FORMAT = "{\"aud\":\"https:
public AadSimpleEmulatorTokenCredential(String emulatorKey) {
if (emulatorKey == null || emulatorKey.isEmpty()) {
throw new IllegalArgumentException("emulatorKey");
}
this.emulatorKeyEncoded = Utils.encodeUrlBase64String(emulatorKey.getBytes());
}
@Override
public Mono<AccessToken> getToken(TokenRequestContext tokenRequestContext) {
// Mints a fresh token on every call; expiry reported as two hours out.
String aadToken = emulatorKey_based_AAD_String();
return Mono.just(new AccessToken(aadToken, OffsetDateTime.now().plusHours(2)));
}
// Assembles header.claims.key with each segment base64url-encoded.
String emulatorKey_based_AAD_String() {
ZonedDateTime currentTime = ZonedDateTime.now();
String part1Encoded = Utils.encodeUrlBase64String(AAD_HEADER_COSMOS_EMULATOR.getBytes());
String part2 = String.format(AAD_CLAIM_COSMOS_EMULATOR_FORMAT,
currentTime.toEpochSecond(),
currentTime.toEpochSecond(),
currentTime.plusHours(2).toEpochSecond());
String part2Encoded = Utils.encodeUrlBase64String(part2.getBytes());
return part1Encoded + "." + part2Encoded + "." + this.emulatorKeyEncoded;
}
}
/**
 * Minimal test document; "mypk" matches the container's partition key path
 * (see PARTITION_KEY_PATH). Serialized via the shared Jackson mapper.
 */
class ItemSample {
public String id;
public String mypk;
public String sgmts;
// Serializes this instance to JSON; wraps mapping failures in RuntimeException.
public String toString() {
try {
return OBJECT_MAPPER.writeValueAsString(this);
} catch (JsonProcessingException ex) {
ex.printStackTrace();
throw new RuntimeException("Unexpected object mapping exception");
}
}
}
} | class AadAuthorizationTests extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(AadAuthorizationTests.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private final static String PARTITION_KEY_PATH = "/mypk";
private final String databaseId = CosmosDatabaseForTest.generateId();
protected AadAuthorizationTests() {
}
private ItemSample getDocumentDefinition(String itemId, String partitionKeyValue) {
ItemSample itemSample = new ItemSample();
itemSample.id = itemId;
itemSample.mypk = partitionKeyValue;
itemSample.sgmts = "[[6519456, 1471916863], [2498434, 1455671440]]";
return itemSample;
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
}
class AadSimpleEmulatorTokenCredential implements TokenCredential {
private final String emulatorKeyEncoded;
private final String AAD_HEADER_COSMOS_EMULATOR = "{\"typ\":\"JWT\",\"alg\":\"RS256\",\"x5t\":\"CosmosEmulatorPrimaryMaster\",\"kid\":\"CosmosEmulatorPrimaryMaster\"}";
private final String AAD_CLAIM_COSMOS_EMULATOR_FORMAT = "{\"aud\":\"https:
public AadSimpleEmulatorTokenCredential(String emulatorKey) {
if (emulatorKey == null || emulatorKey.isEmpty()) {
throw new IllegalArgumentException("emulatorKey");
}
this.emulatorKeyEncoded = Utils.encodeUrlBase64String(emulatorKey.getBytes());
}
@Override
public Mono<AccessToken> getToken(TokenRequestContext tokenRequestContext) {
String aadToken = emulatorKey_based_AAD_String();
return Mono.just(new AccessToken(aadToken, OffsetDateTime.now().plusHours(2)));
}
String emulatorKey_based_AAD_String() {
ZonedDateTime currentTime = ZonedDateTime.now();
String part1Encoded = Utils.encodeUrlBase64String(AAD_HEADER_COSMOS_EMULATOR.getBytes());
String part2 = String.format(AAD_CLAIM_COSMOS_EMULATOR_FORMAT,
currentTime.toEpochSecond(),
currentTime.toEpochSecond(),
currentTime.plusHours(2).toEpochSecond());
String part2Encoded = Utils.encodeUrlBase64String(part2.getBytes());
return part1Encoded + "." + part2Encoded + "." + this.emulatorKeyEncoded;
}
}
class ItemSample {
public String id;
public String mypk;
public String sgmts;
public String toString() {
try {
return OBJECT_MAPPER.writeValueAsString(this);
} catch (JsonProcessingException ex) {
ex.printStackTrace();
throw new RuntimeException("Unexpected object mapping exception");
}
}
}
} |
This is an emulator-only test which cannot be run against a prod endpoint. | public void createAadTokenCredential() throws InterruptedException {
CosmosAsyncDatabase db = null;
CosmosAsyncClient cosmosAsyncClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.COSMOS_EMULATOR_HOST)
.key(TestConfigurations.COSMOS_EMULATOR_KEY)
.buildAsyncClient();
String containerName = UUID.randomUUID().toString();
try {
CosmosDatabaseResponse databaseResponse = cosmosAsyncClient.createDatabase(databaseId).block();
CosmosContainerResponse containerResponse = cosmosAsyncClient.getDatabase(databaseId).createContainer(containerName, PARTITION_KEY_PATH).block();
} finally {
if (cosmosAsyncClient != null) {
safeClose(cosmosAsyncClient);
}
}
Thread.sleep(TIMEOUT);
TokenCredential emulatorCredential = new AadSimpleEmulatorTokenCredential(TestConfigurations.COSMOS_EMULATOR_KEY);
CosmosAsyncClient cosmosAadClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.COSMOS_EMULATOR_HOST)
.credential(emulatorCredential)
.buildAsyncClient();
try {
db = cosmosAadClient.getDatabase(databaseId).read()
.map(dabaseResponse -> {
CosmosAsyncDatabase database = cosmosAadClient.getDatabase(dabaseResponse.getProperties().getId());
log.info("Found database {} with {}", database.getId(), dabaseResponse.getProperties().getETag());
return database;
}).block();
assert db != null;
CosmosAsyncContainer container = db.getContainer(containerName).read()
.map(cosmosContainerResponse -> {
CosmosAsyncContainer container1 = cosmosAadClient.getDatabase(databaseId).getContainer(cosmosContainerResponse.getProperties().getId());
log.info("Found container {} with {}", container1.getId(), cosmosContainerResponse.getProperties().getETag());
return container1;
}).block();
assert container != null;
String itemName = UUID.randomUUID().toString();
String partitionKeyValue = UUID.randomUUID().toString();
ItemSample itemSample = getDocumentDefinition(itemName, partitionKeyValue);
CosmosItemResponse<ItemSample> cosmosItemResponse = container.createItem(itemSample, new CosmosItemRequestOptions()).block();
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
InternalObjectNode item = container
.readItem(itemName, new PartitionKey(partitionKeyValue), options, InternalObjectNode.class)
.map(CosmosItemResponse::getItem)
.map(jsonNode -> {
log.info("Found item with content: " + jsonNode.toString());
return jsonNode;
}).block();
assert item != null;
CosmosQueryRequestOptions requestOptions = new CosmosQueryRequestOptions();
CosmosPagedFlux<JsonNode> queryPagedFlux = container
.queryItems("SELECT * FROM c", requestOptions, JsonNode.class);
List<JsonNode> feedResponse = queryPagedFlux.byPage()
.flatMap(jsonNodeFeedResponse -> {
return Flux.fromIterable(jsonNodeFeedResponse.getResults());
}).map(jsonNode -> {
log.info("Found item with content: " + jsonNode.toString());
return jsonNode;
})
.collectList()
.block();
container.deleteItem(item.getId(), new PartitionKey(partitionKeyValue));
} finally {
if (db != null) {
cosmosAsyncClient.getDatabase(databaseId).delete().block();
}
if (cosmosAadClient != null) {
safeClose(cosmosAadClient);
}
}
Thread.sleep(SHUTDOWN_TIMEOUT);
} | .endpoint(TestConfigurations.COSMOS_EMULATOR_HOST) | public void createAadTokenCredential() throws InterruptedException {
CosmosAsyncDatabase db = null;
CosmosAsyncClient cosmosAsyncClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.buildAsyncClient();
String containerName = UUID.randomUUID().toString();
try {
CosmosDatabaseResponse databaseResponse = cosmosAsyncClient.createDatabase(databaseId).block();
CosmosContainerResponse containerResponse = cosmosAsyncClient.getDatabase(databaseId).createContainer(containerName, PARTITION_KEY_PATH).block();
} finally {
if (cosmosAsyncClient != null) {
safeClose(cosmosAsyncClient);
}
}
Thread.sleep(TIMEOUT);
TokenCredential emulatorCredential = new AadSimpleEmulatorTokenCredential(TestConfigurations.MASTER_KEY);
CosmosAsyncClient cosmosAadClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.credential(emulatorCredential)
.buildAsyncClient();
try {
db = cosmosAadClient.getDatabase(databaseId).read()
.map(dabaseResponse -> {
CosmosAsyncDatabase database = cosmosAadClient.getDatabase(dabaseResponse.getProperties().getId());
log.info("Found database {} with {}", database.getId(), dabaseResponse.getProperties().getETag());
return database;
}).block();
assert db != null;
CosmosAsyncContainer container = db.getContainer(containerName).read()
.map(cosmosContainerResponse -> {
CosmosAsyncContainer container1 = cosmosAadClient.getDatabase(databaseId).getContainer(cosmosContainerResponse.getProperties().getId());
log.info("Found container {} with {}", container1.getId(), cosmosContainerResponse.getProperties().getETag());
return container1;
}).block();
assert container != null;
String itemName = UUID.randomUUID().toString();
String partitionKeyValue = UUID.randomUUID().toString();
ItemSample itemSample = getDocumentDefinition(itemName, partitionKeyValue);
CosmosItemResponse<ItemSample> cosmosItemResponse = container.createItem(itemSample, new CosmosItemRequestOptions()).block();
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
InternalObjectNode item = container
.readItem(itemName, new PartitionKey(partitionKeyValue), options, InternalObjectNode.class)
.map(CosmosItemResponse::getItem)
.map(jsonNode -> {
log.info("Found item with content: " + jsonNode.toString());
return jsonNode;
}).block();
assert item != null;
CosmosQueryRequestOptions requestOptions = new CosmosQueryRequestOptions();
CosmosPagedFlux<JsonNode> queryPagedFlux = container
.queryItems("SELECT * FROM c", requestOptions, JsonNode.class);
List<JsonNode> feedResponse = queryPagedFlux.byPage()
.flatMap(jsonNodeFeedResponse -> {
return Flux.fromIterable(jsonNodeFeedResponse.getResults());
}).map(jsonNode -> {
log.info("Found item with content: " + jsonNode.toString());
return jsonNode;
})
.collectList()
.block();
container.deleteItem(item.getId(), new PartitionKey(partitionKeyValue));
} finally {
if (db != null) {
cosmosAsyncClient.getDatabase(databaseId).delete().block();
}
if (cosmosAadClient != null) {
safeClose(cosmosAadClient);
}
}
Thread.sleep(SHUTDOWN_TIMEOUT);
} | class AadAuthorizationTests extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(AadAuthorizationTests.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private final static String PARTITION_KEY_PATH = "/mypk";
private final String databaseId = CosmosDatabaseForTest.generateId();
protected AadAuthorizationTests() {
}
@Test(groups = { "emulator" }, timeOut = 10 * TIMEOUT)
private ItemSample getDocumentDefinition(String itemId, String partitionKeyValue) {
ItemSample itemSample = new ItemSample();
itemSample.id = itemId;
itemSample.mypk = partitionKeyValue;
itemSample.sgmts = "[[6519456, 1471916863], [2498434, 1455671440]]";
return itemSample;
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
}
class AadSimpleEmulatorTokenCredential implements TokenCredential {
private final String emulatorKeyEncoded;
private final String AAD_HEADER_COSMOS_EMULATOR = "{\"typ\":\"JWT\",\"alg\":\"RS256\",\"x5t\":\"CosmosEmulatorPrimaryMaster\",\"kid\":\"CosmosEmulatorPrimaryMaster\"}";
private final String AAD_CLAIM_COSMOS_EMULATOR_FORMAT = "{\"aud\":\"https:
public AadSimpleEmulatorTokenCredential(String emulatorKey) {
if (emulatorKey == null || emulatorKey.isEmpty()) {
throw new IllegalArgumentException("emulatorKey");
}
this.emulatorKeyEncoded = Utils.encodeUrlBase64String(emulatorKey.getBytes());
}
@Override
public Mono<AccessToken> getToken(TokenRequestContext tokenRequestContext) {
String aadToken = emulatorKey_based_AAD_String();
return Mono.just(new AccessToken(aadToken, OffsetDateTime.now().plusHours(2)));
}
String emulatorKey_based_AAD_String() {
ZonedDateTime currentTime = ZonedDateTime.now();
String part1Encoded = Utils.encodeUrlBase64String(AAD_HEADER_COSMOS_EMULATOR.getBytes());
String part2 = String.format(AAD_CLAIM_COSMOS_EMULATOR_FORMAT,
currentTime.toEpochSecond(),
currentTime.toEpochSecond(),
currentTime.plusHours(2).toEpochSecond());
String part2Encoded = Utils.encodeUrlBase64String(part2.getBytes());
return part1Encoded + "." + part2Encoded + "." + this.emulatorKeyEncoded;
}
}
class ItemSample {
public String id;
public String mypk;
public String sgmts;
public String toString() {
try {
return OBJECT_MAPPER.writeValueAsString(this);
} catch (JsonProcessingException ex) {
ex.printStackTrace();
throw new RuntimeException("Unexpected object mapping exception");
}
}
}
} | class AadAuthorizationTests extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(AadAuthorizationTests.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private final static String PARTITION_KEY_PATH = "/mypk";
private final String databaseId = CosmosDatabaseForTest.generateId();
protected AadAuthorizationTests() {
}
private ItemSample getDocumentDefinition(String itemId, String partitionKeyValue) {
ItemSample itemSample = new ItemSample();
itemSample.id = itemId;
itemSample.mypk = partitionKeyValue;
itemSample.sgmts = "[[6519456, 1471916863], [2498434, 1455671440]]";
return itemSample;
}
@BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
}
@AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
}
class AadSimpleEmulatorTokenCredential implements TokenCredential {
private final String emulatorKeyEncoded;
private final String AAD_HEADER_COSMOS_EMULATOR = "{\"typ\":\"JWT\",\"alg\":\"RS256\",\"x5t\":\"CosmosEmulatorPrimaryMaster\",\"kid\":\"CosmosEmulatorPrimaryMaster\"}";
private final String AAD_CLAIM_COSMOS_EMULATOR_FORMAT = "{\"aud\":\"https:
public AadSimpleEmulatorTokenCredential(String emulatorKey) {
if (emulatorKey == null || emulatorKey.isEmpty()) {
throw new IllegalArgumentException("emulatorKey");
}
this.emulatorKeyEncoded = Utils.encodeUrlBase64String(emulatorKey.getBytes());
}
@Override
public Mono<AccessToken> getToken(TokenRequestContext tokenRequestContext) {
String aadToken = emulatorKey_based_AAD_String();
return Mono.just(new AccessToken(aadToken, OffsetDateTime.now().plusHours(2)));
}
String emulatorKey_based_AAD_String() {
ZonedDateTime currentTime = ZonedDateTime.now();
String part1Encoded = Utils.encodeUrlBase64String(AAD_HEADER_COSMOS_EMULATOR.getBytes());
String part2 = String.format(AAD_CLAIM_COSMOS_EMULATOR_FORMAT,
currentTime.toEpochSecond(),
currentTime.toEpochSecond(),
currentTime.plusHours(2).toEpochSecond());
String part2Encoded = Utils.encodeUrlBase64String(part2.getBytes());
return part1Encoded + "." + part2Encoded + "." + this.emulatorKeyEncoded;
}
}
class ItemSample {
public String id;
public String mypk;
public String sgmts;
public String toString() {
try {
return OBJECT_MAPPER.writeValueAsString(this);
} catch (JsonProcessingException ex) {
ex.printStackTrace();
throw new RuntimeException("Unexpected object mapping exception");
}
}
}
} |
why do we make a copy of the user-provided options here? | private RecognizeOptions getRecognizeOptionsProperties(RecognizeOptions userProvidedOptions) {
if (userProvidedOptions != null) {
return new RecognizeOptions()
.setPollInterval(userProvidedOptions.getPollInterval())
.setFormContentType(userProvidedOptions.getFormContentType())
.setIncludeTextContent(userProvidedOptions.isIncludeTextContent());
} else {
return new RecognizeOptions();
}
} | .setIncludeTextContent(userProvidedOptions.isIncludeTextContent()); | private RecognizeOptions getRecognizeOptionsProperties(RecognizeOptions userProvidedOptions) {
if (userProvidedOptions != null) {
return userProvidedOptions;
} else {
return new RecognizeOptions();
}
} | class FormRecognizerAsyncClient {
private final ClientLogger logger = new ClientLogger(FormRecognizerAsyncClient.class);
private final FormRecognizerClientImpl service;
private final FormRecognizerServiceVersion serviceVersion;
/**
* Create a {@link FormRecognizerAsyncClient} that sends requests to the Form Recognizer services's endpoint. Each
* service call goes through the {@link FormRecognizerClientBuilder | class FormRecognizerAsyncClient {
private final ClientLogger logger = new ClientLogger(FormRecognizerAsyncClient.class);
private final FormRecognizerClientImpl service;
private final FormRecognizerServiceVersion serviceVersion;
/**
* Create a {@link FormRecognizerAsyncClient} that sends requests to the Form Recognizer services's endpoint. Each
* service call goes through the {@link FormRecognizerClientBuilder |
`subscribe` inside another `subscribe` looks a bit odd. Can we instead use reactor pattern here? ```java formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(...) .flatMap(recognizePollingOperation -> recognizePollingOperation.getFinalResult()) .subscribe(recognizedReceipts -> {...}); ``` | public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{file_source_url}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeTextContent(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.subscribe(recognizePollingOperation -> {
recognizePollingOperation.getFinalResult().subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
});
} | RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i); | public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
// "{key}" and "{endpoint}" are placeholders the reader substitutes.
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
// Build a custom HTTP pipeline and hand it to the client builder.
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
 * Code snippet for FormRecognizerAsyncClient#beginRecognizeCustomFormsFromUrl(String, String).
 */
public void beginRecognizeCustomFormsFromUrl() {
    String fileSourceUrl = "{file_source_url}";
    String modelId = "{model_id}";
    // Chain the poller's final result with flatMap rather than subscribing inside
    // another subscribe: one subscription for the whole pipeline means cancellation
    // and errors propagate end to end.
    formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(fileSourceUrl, modelId)
        .flatMap(recognizePollingOperation -> recognizePollingOperation.getFinalResult())
        .subscribe(recognizedForms ->
            recognizedForms.forEach(recognizedForm ->
                recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                    System.out.printf("Field text: %s%n", fieldText);
                    System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
                    System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
                })));
}
/**
 * Code snippet for
 * FormRecognizerAsyncClient#beginRecognizeCustomFormsFromUrl with options.
 */
public void beginRecognizeCustomFormsFromUrlWithOptions() {
    String fileSourceUrl = "{file_source_url}";
    String modelId = "{model_id}";
    boolean includeTextContent = true;
    // Chain the poller's final result with flatMap rather than subscribing inside
    // another subscribe, so the inner pipeline stays attached to the outer
    // subscription (cancellation/backpressure propagate).
    formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(fileSourceUrl, modelId,
        new RecognizeOptions()
            .setIncludeTextContent(includeTextContent)
            .setPollInterval(Duration.ofSeconds(10)))
        .flatMap(recognizePollingOperation -> recognizePollingOperation.getFinalResult())
        .subscribe(recognizedForms ->
            recognizedForms.forEach(recognizedForm ->
                recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                    System.out.printf("Field text: %s%n", fieldText);
                    System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
                    System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
                })));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File sourceFile = new File("{file_source_url}");
String modelId = "{model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(sourceFile.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, sourceFile.length(), modelId)
.subscribe(recognizePollingOperation ->
recognizePollingOperation.getFinalResult().subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
})
)
);
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File sourceFile = new File("{file_source_url}");
String modelId = "{model_id}";
boolean includeTextContent = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(sourceFile.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, sourceFile.length(), modelId,
new RecognizeOptions()
.setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeTextContent(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.subscribe(recognizePollingOperation ->
recognizePollingOperation.getFinalResult().subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
})
)
);
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String sourceFilePath = "{file_source_url}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(sourceFilePath).subscribe(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult().subscribe(layoutPageResults ->
layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
})
));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String sourceFilePath = "{file_source_url}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(sourceFilePath,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.subscribe(recognizePollingOperation ->
recognizePollingOperation.getFinalResult().subscribe(layoutPageResults ->
layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
})
));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File sourceFile = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(sourceFile.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, sourceFile.length())
.subscribe(recognizePollingOperation ->
recognizePollingOperation.getFinalResult().subscribe(layoutPageResults ->
layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
})
));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File sourceFile = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(sourceFile.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, sourceFile.length(),
new RecognizeOptions()
.setFormContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.subscribe(recognizePollingOperation -> recognizePollingOperation.getFinalResult().subscribe(
layoutPageResults -> layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
})
));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{file_source_url}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl).subscribe(recognizePollingOperation -> {
recognizePollingOperation.getFinalResult().subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File sourceFile = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(sourceFile.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, sourceFile.length())
.subscribe(recognizePollingOperation -> {
recognizePollingOperation.getFinalResult().subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File sourceFile = new File("{file_source_url}");
boolean includeTextContent = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(sourceFile.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, sourceFile.length(),
new RecognizeOptions()
.setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeTextContent(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.subscribe(recognizePollingOperation -> {
recognizePollingOperation.getFinalResult().subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
});
}
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(layoutPageResults ->
layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
} |
Use `forEach()` instead or you can also show the reactor pattern here: ```java formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length()) .flatMap(recognizePollingOperation -> recognizePollingOperation.getFinalResult()) .flatMap(recognizedReceipts -> Flux.fromIterable(recognizedReceipts)) .subscribe(recognizedReceipt -> {...}); ``` | public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
} | for (int i = 0; i < recognizedReceipts.size(); i++) { | public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(layoutPageResults ->
layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(layoutPageResults ->
layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
} |
Yep, I am preparing a follow-up PR for sample and snippets update to follow reactor pattern more! | public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
} | for (int i = 0; i < recognizedReceipts.size(); i++) { | public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(layoutPageResults ->
layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl).flatMap(
recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(layoutPageResults ->
layoutPageResults.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult ->
contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedReceipt recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
System.out.printf("----------- Recognized Receipt page %s -----------%n", i);
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
}
});
}
} |
Wow. | public static Object[][] responseContinuationTokenLimitParamProvider() {
CosmosQueryRequestOptions options1 = new CosmosQueryRequestOptions();
ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(options1, 1);
options1.setResponseContinuationTokenLimitInKb(5);
options1.setPartitionKey(new PartitionKey("99"));
String query1 = "Select * from r";
boolean multiPartitionCollection1 = true;
CosmosQueryRequestOptions options2 = new CosmosQueryRequestOptions();
ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(options2, 1);
options2.setResponseContinuationTokenLimitInKb(5);
options2.setPartitionKey(new PartitionKey("99"));
String query2 = "Select * from r order by r.prop";
boolean multiPartitionCollection2 = false;
CosmosQueryRequestOptions options3 = new CosmosQueryRequestOptions();
ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(options3, 1);
options3.setResponseContinuationTokenLimitInKb(5);
options3.setPartitionKey(new PartitionKey("99"));
String query3 = "Select * from r";
boolean multiPartitionCollection3 = false;
CosmosQueryRequestOptions options4 = new CosmosQueryRequestOptions();
options4.setPartitionKey(new PartitionKey("99"));
String query4 = "Select * from r order by r.prop";
boolean multiPartitionCollection4 = false;
return new Object[][]{
{options1, query1, multiPartitionCollection1},
{options2, query2, multiPartitionCollection2},
{options3, query3, multiPartitionCollection3},
{options4, query4, multiPartitionCollection4},
};
} | options1.setResponseContinuationTokenLimitInKb(5); | new CosmosQueryRequestOptions();
ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(options1, 1);
options1.setResponseContinuationTokenLimitInKb(5);
options1.setPartitionKey(new PartitionKey("99"));
String query1 = "Select * from r";
boolean multiPartitionCollection1 = true;
CosmosQueryRequestOptions options2 = new CosmosQueryRequestOptions();
ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(options2, 1);
options2.setResponseContinuationTokenLimitInKb(5);
options2.setPartitionKey(new PartitionKey("99"));
String query2 = "Select * from r order by r.prop";
boolean multiPartitionCollection2 = false;
CosmosQueryRequestOptions options3 = new CosmosQueryRequestOptions();
ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(options3, 1);
options3.setResponseContinuationTokenLimitInKb(5);
options3.setPartitionKey(new PartitionKey("99"));
String query3 = "Select * from r";
boolean multiPartitionCollection3 = false;
CosmosQueryRequestOptions options4 = new CosmosQueryRequestOptions();
options4.setPartitionKey(new PartitionKey("99"));
String query4 = "Select * from r order by r.prop";
boolean multiPartitionCollection4 = false;
return new Object[][]{
{options1, query1, multiPartitionCollection1},
{options2, query2, multiPartitionCollection2},
{options3, query3, multiPartitionCollection3},
{options4, query4, multiPartitionCollection4},
} | class DocumentQuerySpyWireContentTest extends TestSuiteBase {
private Database createdDatabase;
private DocumentCollection createdSinglePartitionCollection;
private DocumentCollection createdMultiPartitionCollection;
private List<Document> createdDocumentsInSinglePartitionCollection = new ArrayList<>();
private List<Document> createdDocumentsInMultiPartitionCollection = new ArrayList<>();
private SpyClientUnderTestFactory.ClientUnderTest client;
public String getSinglePartitionCollectionLink() {
return TestUtils.getCollectionNameLink(createdDatabase.getId(), createdSinglePartitionCollection.getId());
}
public String getMultiPartitionCollectionLink() {
return TestUtils.getCollectionNameLink(createdDatabase.getId(), createdMultiPartitionCollection.getId());
}
@Factory(dataProvider = "clientBuilders")
public DocumentQuerySpyWireContentTest(Builder clientBuilder) {
super(clientBuilder);
}
@DataProvider(name = "responseContinuationTokenLimitParamProvider")
public static Object[][] responseContinuationTokenLimitParamProvider() {
CosmosQueryRequestOptions options1 = ;
}
@Test(dataProvider = "responseContinuationTokenLimitParamProvider", groups = { "simple" }, timeOut = TIMEOUT)
public void queryWithContinuationTokenLimit(CosmosQueryRequestOptions options, String query, boolean isMultiParitionCollection) throws Exception {
String collectionLink;
if (isMultiParitionCollection) {
collectionLink = getMultiPartitionCollectionLink();
} else {
collectionLink = getSinglePartitionCollectionLink();
}
client.clearCapturedRequests();
Flux<FeedResponse<Document>> queryObservable = client
.queryDocuments(collectionLink, query, options);
List<Document> results = queryObservable.flatMap(p -> Flux.fromIterable(p.getResults()))
.collectList().block();
assertThat(results.size()).describedAs("total results").isGreaterThanOrEqualTo(1);
List<HttpRequest> requests = client.getCapturedRequests();
for(HttpRequest req: requests) {
validateRequestHasContinuationTokenLimit(req, options.getResponseContinuationTokenLimitInKb());
}
}
private void validateRequestHasContinuationTokenLimit(HttpRequest request, Integer expectedValue) {
Map<String, String> headers = request.headers().toMap();
if (headers.get(HttpConstants.HttpHeaders.IS_QUERY) != null) {
if (expectedValue != null && expectedValue > 0) {
assertThat(headers
.containsKey(HttpConstants.HttpHeaders.RESPONSE_CONTINUATION_TOKEN_LIMIT_IN_KB))
.isTrue();
assertThat(headers
.get("x-ms-documentdb-responsecontinuationtokenlimitinkb"))
.isEqualTo(Integer.toString(expectedValue));
} else {
assertThat(headers
.containsKey(HttpConstants.HttpHeaders.RESPONSE_CONTINUATION_TOKEN_LIMIT_IN_KB))
.isFalse();
}
}
}
public Document createDocument(AsyncDocumentClient client, String collectionLink, int cnt) {
Document docDefinition = getDocumentDefinition(cnt);
return client
.createDocument(collectionLink, docDefinition, null, false).block().getResource();
}
@BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT)
public void before_DocumentQuerySpyWireContentTest() throws Exception {
client = new SpyClientBuilder(this.clientBuilder()).build();
createdDatabase = SHARED_DATABASE;
createdSinglePartitionCollection = SHARED_SINGLE_PARTITION_COLLECTION;
truncateCollection(SHARED_SINGLE_PARTITION_COLLECTION);
createdMultiPartitionCollection = SHARED_MULTI_PARTITION_COLLECTION;
truncateCollection(SHARED_MULTI_PARTITION_COLLECTION);
for(int i = 0; i < 3; i++) {
createdDocumentsInSinglePartitionCollection.add(createDocument(client, getCollectionLink(createdSinglePartitionCollection), i));
createdDocumentsInMultiPartitionCollection.add(createDocument(client, getCollectionLink(createdMultiPartitionCollection), i));
}
for(int i = 0; i < 5; i++) {
createdDocumentsInSinglePartitionCollection.add(createDocument(client, getCollectionLink(createdSinglePartitionCollection), 99));
createdDocumentsInMultiPartitionCollection.add(createDocument(client, getCollectionLink(createdMultiPartitionCollection), 99));
}
TimeUnit.SECONDS.sleep(1);
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
client.queryDocuments(getMultiPartitionCollectionLink(), "select * from root", options)
.then().block();
client.queryDocuments(getSinglePartitionCollectionLink(), "select * from root", options)
.then().block();
}
@AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private static Document getDocumentDefinition(int cnt) {
String uuid = UUID.randomUUID().toString();
Document doc = new Document(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"prop\" : %d, "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, cnt, cnt));
return doc;
}
} | class DocumentQuerySpyWireContentTest extends TestSuiteBase {
private Database createdDatabase;
private DocumentCollection createdSinglePartitionCollection;
private DocumentCollection createdMultiPartitionCollection;
private List<Document> createdDocumentsInSinglePartitionCollection = new ArrayList<>();
private List<Document> createdDocumentsInMultiPartitionCollection = new ArrayList<>();
private SpyClientUnderTestFactory.ClientUnderTest client;
public String getSinglePartitionCollectionLink() {
return TestUtils.getCollectionNameLink(createdDatabase.getId(), createdSinglePartitionCollection.getId());
}
public String getMultiPartitionCollectionLink() {
return TestUtils.getCollectionNameLink(createdDatabase.getId(), createdMultiPartitionCollection.getId());
}
@Factory(dataProvider = "clientBuilders")
public DocumentQuerySpyWireContentTest(Builder clientBuilder) {
super(clientBuilder);
}
@DataProvider(name = "responseContinuationTokenLimitParamProvider")
public static Object[][] responseContinuationTokenLimitParamProvider() {
CosmosQueryRequestOptions options1 = ;
}
@Test(dataProvider = "responseContinuationTokenLimitParamProvider", groups = { "simple" }, timeOut = TIMEOUT)
public void queryWithContinuationTokenLimit(CosmosQueryRequestOptions options, String query, boolean isMultiParitionCollection) throws Exception {
String collectionLink;
if (isMultiParitionCollection) {
collectionLink = getMultiPartitionCollectionLink();
} else {
collectionLink = getSinglePartitionCollectionLink();
}
client.clearCapturedRequests();
Flux<FeedResponse<Document>> queryObservable = client
.queryDocuments(collectionLink, query, options);
List<Document> results = queryObservable.flatMap(p -> Flux.fromIterable(p.getResults()))
.collectList().block();
assertThat(results.size()).describedAs("total results").isGreaterThanOrEqualTo(1);
List<HttpRequest> requests = client.getCapturedRequests();
for(HttpRequest req: requests) {
validateRequestHasContinuationTokenLimit(req, options.getResponseContinuationTokenLimitInKb());
}
}
private void validateRequestHasContinuationTokenLimit(HttpRequest request, Integer expectedValue) {
Map<String, String> headers = request.headers().toMap();
if (headers.get(HttpConstants.HttpHeaders.IS_QUERY) != null) {
if (expectedValue != null && expectedValue > 0) {
assertThat(headers
.containsKey(HttpConstants.HttpHeaders.RESPONSE_CONTINUATION_TOKEN_LIMIT_IN_KB))
.isTrue();
assertThat(headers
.get("x-ms-documentdb-responsecontinuationtokenlimitinkb"))
.isEqualTo(Integer.toString(expectedValue));
} else {
assertThat(headers
.containsKey(HttpConstants.HttpHeaders.RESPONSE_CONTINUATION_TOKEN_LIMIT_IN_KB))
.isFalse();
}
}
}
public Document createDocument(AsyncDocumentClient client, String collectionLink, int cnt) {
Document docDefinition = getDocumentDefinition(cnt);
return client
.createDocument(collectionLink, docDefinition, null, false).block().getResource();
}
@BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT)
public void before_DocumentQuerySpyWireContentTest() throws Exception {
client = new SpyClientBuilder(this.clientBuilder()).build();
createdDatabase = SHARED_DATABASE;
createdSinglePartitionCollection = SHARED_SINGLE_PARTITION_COLLECTION;
truncateCollection(SHARED_SINGLE_PARTITION_COLLECTION);
createdMultiPartitionCollection = SHARED_MULTI_PARTITION_COLLECTION;
truncateCollection(SHARED_MULTI_PARTITION_COLLECTION);
for(int i = 0; i < 3; i++) {
createdDocumentsInSinglePartitionCollection.add(createDocument(client, getCollectionLink(createdSinglePartitionCollection), i));
createdDocumentsInMultiPartitionCollection.add(createDocument(client, getCollectionLink(createdMultiPartitionCollection), i));
}
for(int i = 0; i < 5; i++) {
createdDocumentsInSinglePartitionCollection.add(createDocument(client, getCollectionLink(createdSinglePartitionCollection), 99));
createdDocumentsInMultiPartitionCollection.add(createDocument(client, getCollectionLink(createdMultiPartitionCollection), 99));
}
TimeUnit.SECONDS.sleep(1);
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
client.queryDocuments(getMultiPartitionCollectionLink(), "select * from root", options)
.then().block();
client.queryDocuments(getSinglePartitionCollectionLink(), "select * from root", options)
.then().block();
}
@AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private static Document getDocumentDefinition(int cnt) {
String uuid = UUID.randomUUID().toString();
Document doc = new Document(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"prop\" : %d, "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, cnt, cnt));
return doc;
}
} |
Should we just give the `ProvisioningState` object? | public String provisioningState() {
return inner().provisioningState().toString();
} | return inner().provisioningState().toString(); | public String provisioningState() {
return inner().provisioningState().toString();
} | class ApplicationSecurityGroupImpl
extends GroupableResourceImpl<
ApplicationSecurityGroup, ApplicationSecurityGroupInner, ApplicationSecurityGroupImpl, NetworkManager>
implements ApplicationSecurityGroup, ApplicationSecurityGroup.Definition, ApplicationSecurityGroup.Update {
ApplicationSecurityGroupImpl(
final String name, final ApplicationSecurityGroupInner innerModel, final NetworkManager networkManager) {
super(name, innerModel, networkManager);
}
@Override
protected Mono<ApplicationSecurityGroupInner> getInnerAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
@Override
public Mono<ApplicationSecurityGroup> createResourceAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.createOrUpdateAsync(resourceGroupName(), name(), inner())
.map(innerToFluentMap(this));
}
@Override
public String resourceGuid() {
return inner().resourceGuid();
}
@Override
} | class ApplicationSecurityGroupImpl
extends GroupableResourceImpl<
ApplicationSecurityGroup, ApplicationSecurityGroupInner, ApplicationSecurityGroupImpl, NetworkManager>
implements ApplicationSecurityGroup, ApplicationSecurityGroup.Definition, ApplicationSecurityGroup.Update {
ApplicationSecurityGroupImpl(
final String name, final ApplicationSecurityGroupInner innerModel, final NetworkManager networkManager) {
super(name, innerModel, networkManager);
}
@Override
protected Mono<ApplicationSecurityGroupInner> getInnerAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
@Override
public Mono<ApplicationSecurityGroup> createResourceAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.createOrUpdateAsync(resourceGroupName(), name(), inner())
.map(innerToFluentMap(this));
}
@Override
public String resourceGuid() {
return inner().resourceGuid();
}
@Override
} |
I prefer to return string. It looks a few interfaces just return the object. Maybe we can do in another PR to align such behavior. | public String provisioningState() {
return inner().provisioningState().toString();
} | return inner().provisioningState().toString(); | public String provisioningState() {
return inner().provisioningState().toString();
} | class ApplicationSecurityGroupImpl
extends GroupableResourceImpl<
ApplicationSecurityGroup, ApplicationSecurityGroupInner, ApplicationSecurityGroupImpl, NetworkManager>
implements ApplicationSecurityGroup, ApplicationSecurityGroup.Definition, ApplicationSecurityGroup.Update {
ApplicationSecurityGroupImpl(
final String name, final ApplicationSecurityGroupInner innerModel, final NetworkManager networkManager) {
super(name, innerModel, networkManager);
}
@Override
protected Mono<ApplicationSecurityGroupInner> getInnerAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
@Override
public Mono<ApplicationSecurityGroup> createResourceAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.createOrUpdateAsync(resourceGroupName(), name(), inner())
.map(innerToFluentMap(this));
}
@Override
public String resourceGuid() {
return inner().resourceGuid();
}
@Override
} | class ApplicationSecurityGroupImpl
extends GroupableResourceImpl<
ApplicationSecurityGroup, ApplicationSecurityGroupInner, ApplicationSecurityGroupImpl, NetworkManager>
implements ApplicationSecurityGroup, ApplicationSecurityGroup.Definition, ApplicationSecurityGroup.Update {
ApplicationSecurityGroupImpl(
final String name, final ApplicationSecurityGroupInner innerModel, final NetworkManager networkManager) {
super(name, innerModel, networkManager);
}
@Override
protected Mono<ApplicationSecurityGroupInner> getInnerAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
@Override
public Mono<ApplicationSecurityGroup> createResourceAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.createOrUpdateAsync(resourceGroupName(), name(), inner())
.map(innerToFluentMap(this));
}
@Override
public String resourceGuid() {
return inner().resourceGuid();
}
@Override
} |
I think object would be better for comparison. Since it is an enum. | public String provisioningState() {
return inner().provisioningState().toString();
} | return inner().provisioningState().toString(); | public String provisioningState() {
return inner().provisioningState().toString();
} | class ApplicationSecurityGroupImpl
extends GroupableResourceImpl<
ApplicationSecurityGroup, ApplicationSecurityGroupInner, ApplicationSecurityGroupImpl, NetworkManager>
implements ApplicationSecurityGroup, ApplicationSecurityGroup.Definition, ApplicationSecurityGroup.Update {
ApplicationSecurityGroupImpl(
final String name, final ApplicationSecurityGroupInner innerModel, final NetworkManager networkManager) {
super(name, innerModel, networkManager);
}
@Override
protected Mono<ApplicationSecurityGroupInner> getInnerAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
@Override
public Mono<ApplicationSecurityGroup> createResourceAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.createOrUpdateAsync(resourceGroupName(), name(), inner())
.map(innerToFluentMap(this));
}
@Override
public String resourceGuid() {
return inner().resourceGuid();
}
@Override
} | class ApplicationSecurityGroupImpl
extends GroupableResourceImpl<
ApplicationSecurityGroup, ApplicationSecurityGroupInner, ApplicationSecurityGroupImpl, NetworkManager>
implements ApplicationSecurityGroup, ApplicationSecurityGroup.Definition, ApplicationSecurityGroup.Update {
ApplicationSecurityGroupImpl(
final String name, final ApplicationSecurityGroupInner innerModel, final NetworkManager networkManager) {
super(name, innerModel, networkManager);
}
@Override
protected Mono<ApplicationSecurityGroupInner> getInnerAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
@Override
public Mono<ApplicationSecurityGroup> createResourceAsync() {
return this
.manager()
.inner()
.getApplicationSecurityGroups()
.createOrUpdateAsync(resourceGroupName(), name(), inner())
.map(innerToFluentMap(this));
}
@Override
public String resourceGuid() {
return inner().resourceGuid();
}
@Override
} |
`topic.getName() == null ` : NullPointerException ? | Mono<Response<TopicDescription>> updateTopicWithResponse(TopicDescription topic, Context context) {
if (topic == null) {
return monoError(logger, new NullPointerException("'topic' cannot be null"));
} else if (topic.getName() == null || topic.getName().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'topic.getName' cannot be null or empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final CreateTopicBodyContent content = new CreateTopicBodyContent()
.setType(CONTENT_TYPE)
.setTopicDescription(topic);
final CreateTopicBody createEntity = new CreateTopicBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.putWithResponseAsync(topic.getName(), createEntity, "*", withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> deserializeTopic(response));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
} | return monoError(logger, new IllegalArgumentException("'topic.getName' cannot be null or empty.")); | return monoError(logger, new NullPointerException("'topic' cannot be null"));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
} | class ServiceBusManagementAsyncClient {
private static final String SERVICE_BUS_TRACING_NAMESPACE_VALUE = "Microsoft.ServiceBus";
private static final String CONTENT_TYPE = "application/xml";
private static final String QUEUES_ENTITY_TYPE = "queues";
private static final String TOPICS_ENTITY_TYPE = "topics";
private static final int NUMBER_OF_ELEMENTS = 100;
private final ServiceBusManagementClientImpl managementClient;
private final EntitysImpl entityClient;
private final ClientLogger logger = new ClientLogger(ServiceBusManagementAsyncClient.class);
private final ServiceBusManagementSerializer serializer;
/**
* Creates a new instance with the given management client and serializer.
*
* @param managementClient Client to make management calls.
* @param serializer Serializer to deserialize ATOM XML responses.
*/
ServiceBusManagementAsyncClient(ServiceBusManagementClientImpl managementClient,
ServiceBusManagementSerializer serializer) {
this.managementClient = Objects.requireNonNull(managementClient, "'managementClient' cannot be null.");
this.entityClient = managementClient.getEntitys();
this.serializer = serializer;
}
/**
* Creates a queue with the given name.
*
* @param queueName Name of the queue to create.
*
* @return A Mono that completes with information about the created queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
* occurred processing the request.
* @throws NullPointerException if {@code queueName} is null.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws ResourceExistsException if a queue exists with the same {@code queueName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueDescription> createQueue(String queueName) {
try {
return createQueue(new QueueDescription(queueName));
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Creates a queue with the {@link QueueDescription}.
*
* @param queue Information about the queue to create.
*
* @return A Mono that completes with information about the created queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link QueueDescription
* string.
* @throws NullPointerException if {@code queue} is null.
* @throws ResourceExistsException if a queue exists with the same {@link QueueDescription
* queueName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueDescription> createQueue(QueueDescription queue) {
return createQueueWithResponse(queue).map(Response::getValue);
}
/**
* Creates a queue and returns the created queue in addition to the HTTP response.
*
* @param queue The queue to create.
*
* @return A Mono that returns the created queue in addition to the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link QueueDescription
* string.
* @throws NullPointerException if {@code queue} is null.
* @throws ResourceExistsException if a queue exists with the same {@link QueueDescription
* queueName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueueDescription>> createQueueWithResponse(QueueDescription queue) {
return withContext(context -> createQueueWithResponse(queue, context));
}
/**
* Creates a subscription with the given topic and subscription names.
*
* @param topicName Name of the topic associated with subscription.
* @param subscriptionName Name of the subscription.
*
* @return A Mono that completes with information about the created subscription.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the quota is exceeded, or an error occurred
* processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} are null or are empty strings.
* @throws ResourceExistsException if a subscription exists with the same topic and subscription name.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionDescription> createSubscription(String topicName, String subscriptionName) {
try {
return createSubscription(new SubscriptionDescription(topicName, subscriptionName));
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Creates a subscription with the {@link SubscriptionDescription}.
*
* @param subscription Information about the subscription to create.
*
* @return A Mono that completes with information about the created subscription.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the quota is exceeded, or an error occurred
* processing the request.
* @throws NullPointerException if {@code subscription} is null.
* @throws ResourceExistsException if a subscription exists with the same topic and subscription name.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionDescription> createSubscription(SubscriptionDescription subscription) {
return createSubscriptionWithResponse(subscription).map(Response::getValue);
}
/**
* Creates a queue and returns the created queue in addition to the HTTP response.
*
* @param subscription Information about the subscription to create.
*
* @return A Mono that returns the created queue in addition to the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the quota is exceeded, or an error occurred
* processing the request.
* @throws NullPointerException if {@code subscription} is null.
* @throws ResourceExistsException if a subscription exists with the same topic and subscription name.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SubscriptionDescription>> createSubscriptionWithResponse(
SubscriptionDescription subscription) {
return withContext(context -> createSubscriptionWithResponse(subscription, context));
}
/**
* Creates a topic with the given name.
*
* @param topicName Name of the topic to create.
*
* @return A Mono that completes with information about the created topic.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is null or an empty string.
* @throws ResourceExistsException if a topic exists with the same {@code topicName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicDescription> createTopic(String topicName) {
try {
return createTopic(new TopicDescription(topicName));
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Creates a topic with the {@link TopicDescription}.
*
* @param topic Information about the topic to create.
*
* @return A Mono that completes with information about the created topic.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link TopicDescription
* string.
* @throws NullPointerException if {@code topic} is null.
* @throws ResourceExistsException if a topic exists with the same {@link TopicDescription
* topicName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicDescription> createTopic(TopicDescription topic) {
return createTopicWithResponse(topic).map(Response::getValue);
}
/**
* Creates a topic and returns the created topic in addition to the HTTP response.
*
* @param topic The topic to create.
*
* @return A Mono that returns the created topic in addition to the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link TopicDescription
* string.
* @throws NullPointerException if {@code topic} is null.
* @throws ResourceExistsException if a topic exists with the same {@link TopicDescription
* topicName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<TopicDescription>> createTopicWithResponse(TopicDescription topic) {
return withContext(context -> createTopicWithResponse(topic, context));
}
/**
* Deletes a queue the matching {@code queueName}.
*
* @param queueName Name of queue to delete.
*
* @return A Mono that completes when the queue is deleted.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteQueue(String queueName) {
return deleteQueueWithResponse(queueName).then();
}
/**
* Deletes a queue the matching {@code queueName} and returns the HTTP response.
*
* @param queueName Name of queue to delete.
*
* @return A Mono that completes when the queue is deleted and returns the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> deleteQueueWithResponse(String queueName) {
return withContext(context -> deleteQueueWithResponse(queueName, context));
}
/**
* Deletes a subscription the matching {@code subscriptionName}.
*
* @param topicName Name of topic associated with subscription to delete.
* @param subscriptionName Name of subscription to delete.
*
* @return A Mono that completes when the subscription is deleted.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} is an empty string.
* @throws NullPointerException if {@code topicName} or {@code subscriptionName} is null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteSubscription(String topicName, String subscriptionName) {
return deleteSubscriptionWithResponse(topicName, subscriptionName).then();
}
/**
* Deletes a subscription the matching {@code subscriptionName} and returns the HTTP response.
*
* @param topicName Name of topic associated with subscription to delete.
* @param subscriptionName Name of subscription to delete.
*
* @return A Mono that completes when the subscription is deleted and returns the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} is an empty string.
* @throws NullPointerException if {@code topicName} or {@code subscriptionName} is null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> deleteSubscriptionWithResponse(String topicName, String subscriptionName) {
return withContext(context -> deleteSubscriptionWithResponse(topicName, subscriptionName, context));
}
/**
* Deletes a topic the matching {@code topicName}.
*
* @param topicName Name of topic to delete.
*
* @return A Mono that completes when the topic is deleted.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteTopic(String topicName) {
return deleteTopicWithResponse(topicName).then();
}
/**
* Deletes a topic the matching {@code topicName} and returns the HTTP response.
*
* @param topicName Name of topic to delete.
*
* @return A Mono that completes when the topic is deleted and returns the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> deleteTopicWithResponse(String topicName) {
return withContext(context -> deleteTopicWithResponse(topicName, context));
}
/**
* Gets information about the queue.
*
* @param queueName Name of queue to get information about.
*
* @return A Mono that completes with information about the queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueDescription> getQueue(String queueName) {
return getQueueWithResponse(queueName).map(Response::getValue);
}
/**
* Gets information about the queue along with its HTTP response.
*
* @param queueName Name of queue to get information about.
*
* @return A Mono that completes with information about the queue and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueueDescription>> getQueueWithResponse(String queueName) {
return withContext(context -> getQueueWithResponse(queueName, context, Function.identity()));
}
/**
* Gets runtime information about the queue.
*
* @param queueName Name of queue to get information about.
*
* @return A Mono that completes with runtime information about the queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueRuntimeInfo> getQueueRuntimeInfo(String queueName) {
return getQueueRuntimeInfoWithResponse(queueName).map(response -> response.getValue());
}
/**
* Gets runtime information about the queue along with its HTTP response.
*
* @param queueName Name of queue to get information about.
*
* @return A Mono that completes with runtime information about the queue and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueueRuntimeInfo>> getQueueRuntimeInfoWithResponse(String queueName) {
return withContext(context -> getQueueWithResponse(queueName, context, QueueRuntimeInfo::new));
}
/**
* Gets information about the queue.
*
* @param topicName Name of topic associated with subscription.
* @param subscriptionName Name of subscription to get information about.
*
* @return A Mono that completes with information about the subscription.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} are null or empty strings.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist in the {@code topicName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionDescription> getSubscription(String topicName, String subscriptionName) {
return getSubscriptionWithResponse(topicName, subscriptionName).map(Response::getValue);
}
/**
* Gets information about the subscription along with its HTTP response.
*
* @param topicName Name of topic associated with subscription.
* @param subscriptionName Name of subscription to get information about.
*
* @return A Mono that completes with information about the subscription and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} are null or empty strings.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SubscriptionDescription>> getSubscriptionWithResponse(String topicName,
String subscriptionName) {
return withContext(context -> getSubscriptionWithResponse(topicName, subscriptionName, context,
Function.identity()));
}
/**
* Gets runtime information about the queue.
*
* @param topicName Name of topic associated with subscription.
* @param subscriptionName Name of subscription to get information about.
*
* @return A Mono that completes with runtime information about the queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code subscriptionName} is an empty string.
* @throws NullPointerException if {@code subscriptionName} is null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionRuntimeInfo> getSubscriptionRuntimeInfo(String topicName, String subscriptionName) {
return getSubscriptionRuntimeInfoWithResponse(topicName, subscriptionName)
.map(response -> response.getValue());
}
/**
* Gets runtime information about the queue.
*
* @param topicName Name of topic associated with subscription.
* @param subscriptionName Name of subscription to get information about.
*
* @return A Mono that completes with runtime information about the queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code subscriptionName} is an empty string.
* @throws NullPointerException if {@code subscriptionName} is null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SubscriptionRuntimeInfo>> getSubscriptionRuntimeInfoWithResponse(String topicName,
String subscriptionName) {
return withContext(context -> getSubscriptionWithResponse(topicName, subscriptionName, context,
SubscriptionRuntimeInfo::new));
}
/**
* Gets information about the topic.
*
* @param topicName Name of topic to get information about.
*
* @return A Mono that completes with information about the topic.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicDescription> getTopic(String topicName) {
return getTopicWithResponse(topicName).map(Response::getValue);
}
/**
* Gets information about the topic along with its HTTP response.
*
* @param topicName Name of topic to get information about.
*
* @return A Mono that completes with information about the topic and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<TopicDescription>> getTopicWithResponse(String topicName) {
return withContext(context -> getTopicWithResponse(topicName, context, Function.identity()));
}
/**
* Gets runtime information about the topic.
*
* @param topicName Name of topic to get information about.
*
* @return A Mono that completes with runtime information about the topic.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicRuntimeInfo> getTopicRuntimeInfo(String topicName) {
return getTopicRuntimeInfoWithResponse(topicName).map(response -> response.getValue());
}
/**
* Gets runtime information about the topic with its HTTP response.
*
* @param topicName Name of topic to get information about.
*
* @return A Mono that completes with runtime information about the topic and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<TopicRuntimeInfo>> getTopicRuntimeInfoWithResponse(String topicName) {
return withContext(context -> getTopicWithResponse(topicName, context, TopicRuntimeInfo::new));
}
/**
* Fetches all the queues in the Service Bus namespace.
*
* @return A Flux of {@link QueueDescription queues} in the Service Bus namespace.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @see <a href="https:
* authorization rules</a>
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<QueueDescription> listQueues() {
return new PagedFlux<>(
() -> withContext(context -> listQueuesFirstPage(context)),
token -> withContext(context -> listQueuesNextPage(token, context)));
}
/**
* Fetches all the subscriptions for a topic.
*
* @param topicName The topic name under which all the subscriptions need to be retrieved.
*
* @return A Flux of {@link SubscriptionDescription subscriptions} for the {@code topicName}.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws NullPointerException if {@code topicName} is null.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @see <a href="https:
* authorization rules</a>
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<SubscriptionDescription> listSubscriptions(String topicName) {
if (topicName == null) {
return pagedFluxError(logger, new NullPointerException("'topicName' cannot be null."));
} else if (topicName.isEmpty()) {
return pagedFluxError(logger, new IllegalArgumentException("'topicName' cannot be an empty string."));
}
return new PagedFlux<>(
() -> withContext(context -> listSubscriptionsFirstPage(topicName, context)),
token -> withContext(context -> listSubscriptionsNextPage(topicName, token, context)));
}
/**
* Fetches all the topics in the Service Bus namespace.
*
* @return A Flux of {@link TopicDescription topics} in the Service Bus namespace.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @see <a href="https:
* authorization rules</a>
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<TopicDescription> listTopics() {
return new PagedFlux<>(
() -> withContext(context -> listTopicsFirstPage(context)),
token -> withContext(context -> listTopicsNextPage(token, context)));
}
/**
* Updates a queue with the given {@link QueueDescription}. The {@link QueueDescription} must be fully populated as
* all of the properties are replaced. If a property is not set the service default value is used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. They are:
* <ul>
* <li>{@link QueueDescription
* <li>{@link QueueDescription
* <li>{@link QueueDescription
* </li>
* <li>{@link QueueDescription
* </ul>
*
* @param queue Information about the queue to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that completes with the updated queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link QueueDescription
* string.
* @throws NullPointerException if {@code queue} is null.
* @see <a href="https:
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueDescription> updateQueue(QueueDescription queue) {
return updateQueueWithResponse(queue).map(Response::getValue);
}
/**
* Updates a queue with the given {@link QueueDescription}. The {@link QueueDescription} must be fully populated as
* all of the properties are replaced. If a property is not set the service default value is used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. They are:
* <ul>
* <li>{@link QueueDescription
* <li>{@link QueueDescription
* <li>{@link QueueDescription
* </li>
* <li>{@link QueueDescription
* </ul>
*
* @param queue Information about the queue to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that returns the updated queue in addition to the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link QueueDescription
* string.
* @throws NullPointerException if {@code queue} is null.
* @see <a href="https:
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueueDescription>> updateQueueWithResponse(QueueDescription queue) {
return withContext(context -> updateQueueWithResponse(queue, context));
}
/**
* Updates a subscription with the given {@link SubscriptionDescription}. The {@link SubscriptionDescription} must
* be fully populated as all of the properties are replaced. If a property is not set the service default value is
* used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. They are:
* <ul>
* <li>{@link SubscriptionDescription
* <li>{@link SubscriptionDescription
* <li>{@link SubscriptionDescription
* </ul>
*
* @param subscription Information about the subscription to update. You must provide all the property values
* that are desired on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that returns the updated subscription.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the subscription quota is exceeded, or an
* error occurred processing the request.
* @throws IllegalArgumentException if {@link SubscriptionDescription
* SubscriptionDescription
* @throws NullPointerException if {@code subscription} is null.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionDescription> updateSubscription(SubscriptionDescription subscription) {
return updateSubscriptionWithResponse(subscription).map(Response::getValue);
}
/**
* Updates a subscription with the given {@link SubscriptionDescription}. The {@link SubscriptionDescription} must
* be fully populated as all of the properties are replaced. If a property is not set the service default value is
* used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. They are:
* <ul>
* <li>{@link SubscriptionDescription
* <li>{@link SubscriptionDescription
* <li>{@link SubscriptionDescription
* </ul>
*
* @param subscription Information about the subscription to update. You must provide all the property values
* that are desired on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that returns the updated subscription in addition to the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the subscription quota is exceeded, or an
* error occurred processing the request.
* @throws IllegalArgumentException if {@link SubscriptionDescription
* SubscriptionDescription
* @throws NullPointerException if {@code subscription} is null.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SubscriptionDescription>> updateSubscriptionWithResponse(
SubscriptionDescription subscription) {
return withContext(context -> updateSubscriptionWithResponse(subscription, context));
}
/**
* Updates a topic with the given {@link TopicDescription}. The {@link TopicDescription} must be fully populated as
* all of the properties are replaced. If a property is not set the service default value is used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. They are:
* <ul>
* <li>{@link TopicDescription
* <li>{@link TopicDescription
* </li>
* </ul>
*
* @param topic Information about the topic to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that completes with the updated topic.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link TopicDescription
* string.
* @throws NullPointerException if {@code topic} is null.
* @see <a href="https:
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicDescription> updateTopic(TopicDescription topic) {
return updateTopicWithResponse(topic).map(Response::getValue);
}
/**
* Updates a topic with the given {@link TopicDescription}. The {@link TopicDescription} must be fully populated as
* all of the properties are replaced. If a property is not set the service default value is used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. They are:
* <ul>
* <li>{@link TopicDescription
* <li>{@link TopicDescription
* </li>
* </ul>
*
* @param topic Information about the topic to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that completes with the updated topic and its HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link TopicDescription
* string.
* @throws NullPointerException if {@code topic} is null.
* @see <a href="https:
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<TopicDescription>> updateTopicWithResponse(TopicDescription topic) {
return withContext(context -> updateTopicWithResponse(topic, context));
}
/**
* Creates a queue with its context.
*
* @param queue Queue to create.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link QueueDescription}.
*/
Mono<Response<QueueDescription>> createQueueWithResponse(QueueDescription queue, Context context) {
if (queue == null) {
return monoError(logger, new NullPointerException("'queue' cannot be null"));
} else if (queue.getName() == null || queue.getName().isEmpty()) {
return monoError(logger, new IllegalArgumentException("'queue.getName' cannot be null or empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final CreateQueueBodyContent content = new CreateQueueBodyContent()
.setType(CONTENT_TYPE)
.setQueueDescription(queue);
final CreateQueueBody createEntity = new CreateQueueBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.putWithResponseAsync(queue.getName(), createEntity, null, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(this::deserializeQueue);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a subscription with its context.
*
* @param subscription Subscription to create.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link SubscriptionDescription}.
*/
Mono<Response<SubscriptionDescription>> createSubscriptionWithResponse(SubscriptionDescription subscription,
Context context) {
if (subscription == null) {
return monoError(logger, new NullPointerException("'subscription' cannot be null"));
}
final CreateSubscriptionBodyContent content = new CreateSubscriptionBodyContent()
.setType(CONTENT_TYPE)
.setSubscriptionDescription(subscription);
final CreateSubscriptionBody createEntity = new CreateSubscriptionBody().setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return managementClient.getSubscriptions().putWithResponseAsync(subscription.getTopicName(),
subscription.getSubscriptionName(), createEntity, null, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> deserializeSubscription(subscription.getTopicName(), response));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a topic with its context.
*
* @param topic Topic to create.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link TopicDescription}.
*/
Mono<Response<TopicDescription>> createTopicWithResponse(TopicDescription topic, Context context) {
if (topic == null) {
else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final CreateTopicBodyContent content = new CreateTopicBodyContent()
.setType(CONTENT_TYPE)
.setTopicDescription(topic);
final CreateTopicBody createEntity = new CreateTopicBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.putWithResponseAsync(topic.getName(), createEntity, null, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(this::deserializeTopic);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Deletes a queue with its context.
*
* @param queueName Name of queue to delete.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link QueueDescription}.
*/
Mono<Response<Void>> deleteQueueWithResponse(String queueName, Context context) {
if (queueName == null) {
return monoError(logger, new NullPointerException("'queueName' cannot be null"));
} else if (queueName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'queueName' cannot be empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.deleteWithResponseAsync(queueName, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> {
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), null);
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Deletes a subscription with its context.
*
* @param topicName Name of topic associated with subscription to delete.
* @param subscriptionName Name of subscription to delete.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link SubscriptionDescription}.
*/
Mono<Response<Void>> deleteSubscriptionWithResponse(String topicName, String subscriptionName, Context context) {
if (subscriptionName == null) {
return monoError(logger, new NullPointerException("'subscriptionName' cannot be null"));
} else if (subscriptionName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'subscriptionName' cannot be empty."));
} else if (topicName == null) {
return monoError(logger, new NullPointerException("'topicName' cannot be null"));
} else if (topicName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'topicName' cannot be empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return managementClient.getSubscriptions().deleteWithResponseAsync(topicName, subscriptionName,
withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), null));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Deletes a topic with its context.
*
* @param topicName Name of topic to delete.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link TopicDescription}.
*/
Mono<Response<Void>> deleteTopicWithResponse(String topicName, Context context) {
if (topicName == null) {
return monoError(logger, new NullPointerException("'topicName' cannot be null"));
} else if (topicName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'topicName' cannot be empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.deleteWithResponseAsync(topicName, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), null));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Gets a queue with its context.
*
* @param queueName Name of queue to fetch information for.
* @param context Context to pass into request.
*
* @return A Mono that completes with the {@link QueueDescription}.
*/
<T> Mono<Response<T>> getQueueWithResponse(String queueName, Context context,
Function<QueueDescription, T> mapper) {
if (queueName == null) {
return monoError(logger, new NullPointerException("'queueName' cannot be null"));
} else if (queueName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'queueName' cannot be empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.getWithResponseAsync(queueName, true, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> {
final Response<QueueDescription> deserialize = deserializeQueue(response);
final T mapped = deserialize.getValue() != null
? mapper.apply(deserialize.getValue())
: null;
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
mapped);
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Gets a subscription with its context.
*
* @param topicName Name of the topic associated with the subscription.
* @param subscriptionName Name of subscription to fetch information for.
* @param context Context to pass into request.
*
* @return A Mono that completes with the {@link SubscriptionDescription}.
*/
<T> Mono<Response<T>> getSubscriptionWithResponse(String topicName, String subscriptionName, Context context,
Function<SubscriptionDescription, T> mapper) {
if (topicName == null || topicName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'topicName' cannot be null or an empty string."));
} else if (subscriptionName == null || subscriptionName.isEmpty()) {
return monoError(logger,
new IllegalArgumentException("'subscriptionName' cannot be null or an empty string."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return managementClient.getSubscriptions().getWithResponseAsync(topicName, subscriptionName, true,
withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> {
final Response<SubscriptionDescription> deserialize = deserializeSubscription(topicName, response);
final T mapped = deserialize.getValue() != null
? mapper.apply(deserialize.getValue())
: null;
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
mapped);
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
 * Gets a topic, mapped through {@code mapper}, along with its HTTP response.
 *
 * @param topicName Name of topic to fetch information for.
 * @param context Context to pass into request.
 * @param mapper Maps the deserialized {@link TopicDescription} to the result type.
 * @param <T> Type the topic is mapped to.
 *
 * @return A Mono that completes with the mapped topic and the HTTP response information.
 */
<T> Mono<Response<T>> getTopicWithResponse(String topicName, Context context,
    Function<TopicDescription, T> mapper) {
    // Guard clauses: reject invalid arguments up front.
    if (topicName == null) {
        return monoError(logger, new NullPointerException("'topicName' cannot be null"));
    }
    if (topicName.isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'topicName' cannot be empty."));
    }
    if (context == null) {
        return monoError(logger, new NullPointerException("'context' cannot be null."));
    }

    final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);

    try {
        return entityClient.getWithResponseAsync(topicName, true, withTracing)
            .onErrorMap(ServiceBusManagementAsyncClient::mapException)
            .map(response -> {
                final Response<TopicDescription> deserialized = deserializeTopic(response);
                final TopicDescription body = deserialized.getValue();
                // Mapper is applied only when the response actually carried a topic.
                final T mapped = body == null ? null : mapper.apply(body);
                return new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
                    response.getHeaders(), mapped);
            });
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
 * Gets the first page of queues with context.
 *
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with a page of queues.
 */
Mono<PagedResponse<QueueDescription>> listQueuesFirstPage(Context context) {
    final Context tracingContext =
        context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
    try {
        // The first page always starts at offset zero.
        return listQueues(0, tracingContext);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Gets the next page of queues with context.
 *
 * @param continuationToken Number of items to skip in feed.
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with a page of queues or empty if there are no items left.
 */
Mono<PagedResponse<QueueDescription>> listQueuesNextPage(String continuationToken, Context context) {
    // A blank token means the previous page was the last one.
    if (continuationToken == null || continuationToken.isEmpty()) {
        return Mono.empty();
    }

    try {
        final Context tracingContext =
            context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
        // The continuation token encodes the number of entities to skip.
        return listQueues(Integer.parseInt(continuationToken), tracingContext);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Gets the first page of subscriptions with context.
 *
 * @param topicName Name of the topic whose subscriptions are listed.
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with a page of subscriptions.
 */
Mono<PagedResponse<SubscriptionDescription>> listSubscriptionsFirstPage(String topicName, Context context) {
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
// First page starts at offset zero.
return listSubscriptions(topicName, 0, withTracing);
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
 * Gets the next page of subscriptions with context.
 *
 * @param topicName Name of the topic whose subscriptions are listed.
 * @param continuationToken Number of items to skip in feed.
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with a page of subscriptions or empty if there are no items left.
 */
Mono<PagedResponse<SubscriptionDescription>> listSubscriptionsNextPage(String topicName, String continuationToken,
Context context) {
// A blank token means the previous page was the last one.
if (continuationToken == null || continuationToken.isEmpty()) {
return Mono.empty();
}
try {
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
// The continuation token encodes the number of entities to skip.
final int skip = Integer.parseInt(continuationToken);
return listSubscriptions(topicName, skip, withTracing);
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
 * Gets the first page of topics with context.
 *
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with a page of topics.
 */
Mono<PagedResponse<TopicDescription>> listTopicsFirstPage(Context context) {
    final Context tracingContext =
        context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
    try {
        // The first page always starts at offset zero.
        return listTopics(0, tracingContext);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Gets the next page of topics with context.
 *
 * @param continuationToken Number of items to skip in feed.
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with a page of topics or empty if there are no items left.
 */
Mono<PagedResponse<TopicDescription>> listTopicsNextPage(String continuationToken, Context context) {
    // A blank token means the previous page was the last one.
    if (continuationToken == null || continuationToken.isEmpty()) {
        return Mono.empty();
    }

    try {
        final Context tracingContext =
            context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
        // The continuation token encodes the number of entities to skip.
        return listTopics(Integer.parseInt(continuationToken), tracingContext);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Updates a queue with its context.
 *
 * @param queue Information about the queue to update. You must provide all the property values that are desired
 *     on the updated entity. Any values not provided are set to the service default values.
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with the updated {@link QueueDescription}.
 */
Mono<Response<QueueDescription>> updateQueueWithResponse(QueueDescription queue, Context context) {
    // Guard clauses: reject invalid arguments up front.
    if (queue == null) {
        return monoError(logger, new NullPointerException("'queue' cannot be null"));
    }
    if (queue.getName() == null || queue.getName().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'queue.getName' cannot be null or empty."));
    }
    if (context == null) {
        return monoError(logger, new NullPointerException("'context' cannot be null."));
    }

    // Wrap the description in the ATOM entry envelope expected by the service.
    final CreateQueueBodyContent content = new CreateQueueBodyContent()
        .setType(CONTENT_TYPE)
        .setQueueDescription(queue);
    final CreateQueueBody updateEntity = new CreateQueueBody()
        .setContent(content);
    final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);

    try {
        // "*" if-match header: update unconditionally, regardless of the entity's current ETag.
        return entityClient.putWithResponseAsync(queue.getName(), updateEntity, "*", withTracing)
            .onErrorMap(ServiceBusManagementAsyncClient::mapException)
            .map(this::deserializeQueue);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
 * Updates a subscription with its context.
 *
 * @param subscription Information about the subscription to update. You must provide all the property values
 * that are desired on the updated entity. Any values not provided are set to the service default values.
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with the updated {@link SubscriptionDescription}.
 */
Mono<Response<SubscriptionDescription>> updateSubscriptionWithResponse(SubscriptionDescription subscription,
Context context) {
if (subscription == null) {
return monoError(logger, new NullPointerException("'subscription' cannot be null"));
}
final String topicName = subscription.getTopicName();
final String subscriptionName = subscription.getSubscriptionName();
if (topicName == null || topicName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'getTopicName' cannot be null or empty."));
} else if (subscriptionName == null || subscriptionName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'getSubscriptionName' cannot be null or empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
// Wrap the description in the ATOM entry envelope expected by the service.
final CreateSubscriptionBodyContent content = new CreateSubscriptionBodyContent()
.setType(CONTENT_TYPE)
.setSubscriptionDescription(subscription);
final CreateSubscriptionBody createEntity = new CreateSubscriptionBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
// "*" if-match header: update unconditionally, regardless of the entity's current ETag.
return managementClient.getSubscriptions().putWithResponseAsync(topicName, subscriptionName, createEntity,
"*", withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> deserializeSubscription(topicName, response));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
 * Updates a topic with its context.
 *
 * @param topic Information about the topic to update. You must provide all the property values that are desired
 *     on the updated entity. Any values not provided are set to the service default values.
 * @param context Context to pass into request.
 *
 * @return A Mono that completes with the updated {@link TopicDescription}.
 */
Mono<Response<TopicDescription>> updateTopicWithResponse(TopicDescription topic, Context context) {
    // Guard clauses: reject invalid arguments up front.
    if (topic == null) {
        return monoError(logger, new NullPointerException("'topic' cannot be null"));
    }
    if (topic.getName() == null || topic.getName().isEmpty()) {
        return monoError(logger, new IllegalArgumentException("'topic.getName' cannot be null or empty."));
    }
    if (context == null) {
        return monoError(logger, new NullPointerException("'context' cannot be null."));
    }

    // Wrap the description in the ATOM entry envelope expected by the service.
    final CreateTopicBodyContent content = new CreateTopicBodyContent()
        .setType(CONTENT_TYPE)
        .setTopicDescription(topic);
    final CreateTopicBody updateEntity = new CreateTopicBody()
        .setContent(content);
    final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);

    try {
        // "*" if-match header: update unconditionally, regardless of the entity's current ETag.
        return entityClient.putWithResponseAsync(topic.getName(), updateEntity, "*", withTracing)
            .onErrorMap(ServiceBusManagementAsyncClient::mapException)
            .map(this::deserializeTopic);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
 * Deserializes the string form of {@code object} into the given class.
 *
 * @param object Raw response body; its {@code String.valueOf} form is fed to the XML serializer.
 * @param clazz Class to deserialize into.
 * @param <T> Type to deserialize into.
 *
 * @return The deserialized value, or {@code null} when the body is null or empty.
 * @throws RuntimeException (logged) when the contents cannot be deserialized.
 */
private <T> T deserialize(Object object, Class<T> clazz) {
if (object == null) {
return null;
}
final String contents = String.valueOf(object);
if (contents.isEmpty()) {
return null;
}
try {
return serializer.deserialize(contents, clazz);
} catch (IOException e) {
// Wrap as unchecked: callers treat an undeserializable body as a programming/service error.
throw logger.logExceptionAsError(new RuntimeException(String.format(
"Exception while deserializing. Body: [%s]. Class: %s", contents, clazz), e));
}
}
/**
 * Given an HTTP response, will deserialize it into a strongly typed Response object.
 *
 * @param response HTTP response to deserialize response body from.
 * @param clazz Class to deserialize response type into.
 * @param <T> Class type to deserialize response into.
 *
 * @return A Response with a strongly typed response value.
 */
private <T> Response<T> deserialize(Response<Object> response, Class<T> clazz) {
    final T typedBody = deserialize(response.getValue(), clazz);
    return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), typedBody);
}
/**
 * Converts a Response into its corresponding {@link QueueDescriptionEntry} then mapped into {@link
 * QueueDescription}.
 *
 * @param response HTTP Response to deserialize.
 *
 * @return The corresponding HTTP response with convenience properties set; the value is {@code null} when the
 *     body was empty or carried no content.
 */
private Response<QueueDescription> deserializeQueue(Response<Object> response) {
final QueueDescriptionEntry entry = deserialize(response.getValue(), QueueDescriptionEntry.class);
// It's possible to have a 200 response with an empty body (e.g. transient service behavior), so both the
// entry and its content are checked before dereferencing.
if (entry == null) {
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
} else if (entry.getContent() == null) {
logger.warning("entry.getContent() is null. There should have been content returned. Entry: {}", entry);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
}
final QueueDescription result = entry.getContent().getQueueDescription();
// The queue name is carried in the ATOM entry title, not in the description payload itself.
final String queueName = getTitleValue(entry.getTitle());
EntityHelper.setQueueName(result, queueName);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), result);
}
/**
 * Converts a Response into its corresponding {@link SubscriptionDescriptionEntry} then mapped into {@link
 * SubscriptionDescription}.
 *
 * @param topicName Name of the topic the subscription belongs to; set on the convenience property because the
 *     payload does not carry it.
 * @param response HTTP Response to deserialize.
 *
 * @return The corresponding HTTP response with convenience properties set; the value is {@code null} when the
 *     body was empty or carried no content.
 */
private Response<SubscriptionDescription> deserializeSubscription(String topicName, Response<Object> response) {
final SubscriptionDescriptionEntry entry = deserialize(response.getValue(), SubscriptionDescriptionEntry.class);
if (entry == null) {
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
} else if (entry.getContent() == null) {
logger.warning("entry.getContent() is null. There should have been content returned. Entry: {}", entry);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
}
final SubscriptionDescription subscription = entry.getContent().getSubscriptionDescription();
// The subscription name is carried in the ATOM entry title.
final String subscriptionName = getTitleValue(entry.getTitle());
EntityHelper.setSubscriptionName(subscription, subscriptionName);
EntityHelper.setTopicName(subscription, topicName);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
subscription);
}
/**
 * Converts a Response into its corresponding {@link TopicDescriptionEntry} then mapped into {@link
 * TopicDescription}.
 *
 * @param response HTTP Response to deserialize.
 *
 * @return The corresponding HTTP response with convenience properties set; the value is {@code null} when the
 *     body was empty or carried no content.
 */
private Response<TopicDescription> deserializeTopic(Response<Object> response) {
    final TopicDescriptionEntry entry = deserialize(response.getValue(), TopicDescriptionEntry.class);

    // It's possible to have a 200 response with an empty body, so both the entry and its content are checked
    // before dereferencing.
    if (entry == null) {
        return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
    } else if (entry.getContent() == null) {
        logger.warning("entry.getContent() is null. There should have been content returned. Entry: {}", entry);
        return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
    }

    final TopicDescription result = entry.getContent().getTopicDescription();
    // The topic name is carried in the ATOM entry title. (Local was misnamed "queueName" — copy-paste from
    // deserializeQueue; renamed for clarity, behavior unchanged.)
    final String topicName = getTitleValue(entry.getTitle());
    EntityHelper.setTopicName(result, topicName);

    return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), result);
}
/**
 * Creates a {@link FeedPage} given the elements and a set of response links to get the next link from.
 *
 * @param response HTTP response the feed was read from.
 * @param entities Entities in the feed.
 * @param responseLinks Links returned from the feed.
 * @param <TResult> Type of Service Bus entities in page.
 * @param <TFeed> Type of the feed the response carried.
 *
 * @return A {@link FeedPage} indicating whether this can be continued or not.
 * @throws MalformedURLException if the "next" page link does not contain a well-formed URL.
 * @throws UnsupportedEncodingException if UTF-8 is not supported when decoding the query string.
 */
private <TResult, TFeed> FeedPage<TResult> extractPage(Response<TFeed> response, List<TResult> entities,
    List<ResponseLink> responseLinks)
    throws MalformedURLException, UnsupportedEncodingException {

    final Optional<ResponseLink> nextLink = responseLinks.stream()
        .filter(link -> link.getRel().equalsIgnoreCase("next"))
        .findFirst();
    if (!nextLink.isPresent()) {
        // No "next" link: this is the final page.
        return new FeedPage<>(response.getStatusCode(), response.getHeaders(), response.getRequest(), entities);
    }

    final URL url = new URL(nextLink.get().getHref());
    final String decode = URLDecoder.decode(url.getQuery(), StandardCharsets.UTF_8.name());

    // Extract the $skip query parameter which becomes the continuation token.
    // Fixes: the split regex was the redundant alternation "&|&" (equivalent to "&"), and the element access
    // parts[0] ran before the parts.length check — the length guard must short-circuit first.
    final Optional<Integer> skipParameter = Arrays.stream(decode.split("&"))
        .map(part -> part.split("=", 2))
        .filter(parts -> parts.length == 2 && parts[0].equalsIgnoreCase("$skip"))
        .map(parts -> Integer.valueOf(parts[1]))
        .findFirst();

    if (skipParameter.isPresent()) {
        return new FeedPage<>(response.getStatusCode(), response.getHeaders(), response.getRequest(), entities,
            skipParameter.get());
    } else {
        logger.warning("There should have been a skip parameter for the next page.");
        return new FeedPage<>(response.getStatusCode(), response.getHeaders(), response.getRequest(), entities);
    }
}
/**
 * Helper method that invokes the service method, extracts the data and translates it to a PagedResponse.
 *
 * @param skip Number of elements to skip.
 * @param context Context for the query.
 *
 * @return A Mono that completes with a paged response of queues.
 */
private Mono<PagedResponse<QueueDescription>> listQueues(int skip, Context context) {
    // Consistency fix: use the bare NUMBER_OF_ELEMENTS constant, matching listSubscriptions, instead of the
    // redundantly qualified ServiceBusManagementAsyncClient.NUMBER_OF_ELEMENTS.
    return managementClient.listEntitiesWithResponseAsync(QUEUES_ENTITY_TYPE, skip, NUMBER_OF_ELEMENTS, context)
        .onErrorMap(ServiceBusManagementAsyncClient::mapException)
        .flatMap(response -> {
            final Response<QueueDescriptionFeed> feedResponse = deserialize(response, QueueDescriptionFeed.class);
            final QueueDescriptionFeed feed = feedResponse.getValue();
            if (feed == null) {
                logger.warning("Could not deserialize QueueDescriptionFeed. skip {}, top: {}", skip,
                    NUMBER_OF_ELEMENTS);
                return Mono.empty();
            }

            // Only entries that actually carry a queue description are surfaced; the queue name comes from
            // the ATOM entry title.
            final List<QueueDescription> entities = feed.getEntry().stream()
                .filter(e -> e.getContent() != null && e.getContent().getQueueDescription() != null)
                .map(e -> {
                    final String queueName = getTitleValue(e.getTitle());
                    final QueueDescription queueDescription = e.getContent().getQueueDescription();
                    EntityHelper.setQueueName(queueDescription, queueName);
                    return queueDescription;
                })
                .collect(Collectors.toList());
            try {
                return Mono.just(extractPage(feedResponse, entities, feed.getLink()));
            } catch (MalformedURLException | UnsupportedEncodingException error) {
                return Mono.error(new RuntimeException("Could not parse response into FeedPage<QueueDescription>",
                    error));
            }
        });
}
/**
 * Helper method that invokes the service method, extracts the data and translates it to a PagedResponse.
 *
 * @param topicName Name of the topic whose subscriptions are listed.
 * @param skip Number of elements to skip.
 * @param context Context for the query.
 *
 * @return A Mono that completes with a paged response of subscriptions.
 */
private Mono<PagedResponse<SubscriptionDescription>> listSubscriptions(String topicName, int skip,
Context context) {
return managementClient.listSubscriptionsWithResponseAsync(topicName, skip, NUMBER_OF_ELEMENTS, context)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.flatMap(response -> {
final Response<SubscriptionDescriptionFeed> feedResponse = deserialize(response,
SubscriptionDescriptionFeed.class);
final SubscriptionDescriptionFeed feed = feedResponse.getValue();
if (feed == null) {
logger.warning("Could not deserialize SubscriptionDescriptionFeed. skip {}, top: {}", skip, ServiceBusManagementAsyncClient.NUMBER_OF_ELEMENTS);
return Mono.empty();
}
// Only entries that actually carry a subscription description are surfaced; the subscription name
// comes from the ATOM entry title, the topic name from the argument.
final List<SubscriptionDescription> entities = feed.getEntry().stream()
.filter(e -> e.getContent() != null && e.getContent().getSubscriptionDescription() != null)
.map(e -> {
final String subscriptionName = getTitleValue(e.getTitle());
final SubscriptionDescription description = e.getContent().getSubscriptionDescription();
EntityHelper.setTopicName(description, topicName);
EntityHelper.setSubscriptionName(description, subscriptionName);
return description;
})
.collect(Collectors.toList());
try {
return Mono.just(extractPage(feedResponse, entities, feed.getLink()));
} catch (MalformedURLException | UnsupportedEncodingException error) {
return Mono.error(new RuntimeException("Could not parse response into FeedPage<SubscriptionDescription>",
error));
}
});
}
/**
 * Helper method that invokes the service method, extracts the data and translates it to a PagedResponse.
 *
 * @param skip Number of elements to skip.
 * @param context Context for the query.
 *
 * @return A Mono that completes with a paged response of topics.
 */
private Mono<PagedResponse<TopicDescription>> listTopics(int skip, Context context) {
    // Consistency fix: use the bare NUMBER_OF_ELEMENTS constant, matching listSubscriptions, instead of the
    // redundantly qualified ServiceBusManagementAsyncClient.NUMBER_OF_ELEMENTS.
    return managementClient.listEntitiesWithResponseAsync(TOPICS_ENTITY_TYPE, skip, NUMBER_OF_ELEMENTS, context)
        .onErrorMap(ServiceBusManagementAsyncClient::mapException)
        .flatMap(response -> {
            final Response<TopicDescriptionFeed> feedResponse = deserialize(response, TopicDescriptionFeed.class);
            final TopicDescriptionFeed feed = feedResponse.getValue();
            if (feed == null) {
                logger.warning("Could not deserialize TopicDescriptionFeed. skip {}, top: {}", skip,
                    NUMBER_OF_ELEMENTS);
                return Mono.empty();
            }

            // Only entries that actually carry a topic description are surfaced; the topic name comes from
            // the ATOM entry title.
            final List<TopicDescription> entities = feed.getEntry().stream()
                .filter(e -> e.getContent() != null && e.getContent().getTopicDescription() != null)
                .map(e -> {
                    final String topicName = getTitleValue(e.getTitle());
                    final TopicDescription topicDescription = e.getContent().getTopicDescription();
                    EntityHelper.setTopicName(topicDescription, topicName);
                    return topicDescription;
                })
                .collect(Collectors.toList());
            try {
                return Mono.just(extractPage(feedResponse, entities, feed.getLink()));
            } catch (MalformedURLException | UnsupportedEncodingException error) {
                return Mono.error(new RuntimeException("Could not parse response into FeedPage<TopicDescription>",
                    error));
            }
        });
}
/**
 * Given an XML title element, returns the XML text inside. Jackson deserializes Objects as LinkedHashMaps. XML text
 * is represented as an entry with an empty string as the key.
 *
 * For example, the text returned from this {@code <title text="text/xml">QueueName</title>} is "QueueName".
 *
 * @param responseTitle XML title element.
 *
 * @return The XML text inside the title. {@code null} is returned if there is no value.
 */
@SuppressWarnings("unchecked")
private String getTitleValue(Object responseTitle) {
if (!(responseTitle instanceof Map)) {
return null;
}
final Map<String, String> map;
try {
map = (Map<String, String>) responseTitle;
// Due to erasure the cast above cannot fail; a ClassCastException surfaces only at the implicit
// String check on the returned value, which is what the catch below handles.
return map.get("");
} catch (ClassCastException error) {
logger.warning("Unable to cast to Map<String,String>. Title: {}", responseTitle, error);
return null;
}
}
/**
 * Maps an exception from the ATOM APIs to its associated {@link HttpResponseException}.
 *
 * @param exception Exception from the ATOM API.
 *
 * @return The corresponding {@link HttpResponseException} or {@code throwable} if it is not an instance of {@link
 * ServiceBusManagementErrorException}.
 */
private static Throwable mapException(Throwable exception) {
    if (!(exception instanceof ServiceBusManagementErrorException)) {
        return exception;
    }

    final ServiceBusManagementErrorException managementError = (ServiceBusManagementErrorException) exception;
    final ServiceBusManagementError error = managementError.getValue();

    // Robustness fix: guard against a response without a deserialized error payload, which previously
    // raised an NPE here instead of surfacing the original service error.
    if (error == null) {
        return new HttpResponseException(managementError.getMessage(), managementError.getResponse(), exception);
    }

    // Translate well-known HTTP status codes to the azure-core exception hierarchy.
    switch (error.getCode()) {
        case 401:
            return new ClientAuthenticationException(error.getDetail(), managementError.getResponse(), exception);
        case 404:
            return new ResourceNotFoundException(error.getDetail(), managementError.getResponse(), exception);
        case 409:
            return new ResourceExistsException(error.getDetail(), managementError.getResponse(), exception);
        case 412:
            return new ResourceModifiedException(error.getDetail(), managementError.getResponse(), exception);
        default:
            return new HttpResponseException(error.getDetail(), managementError.getResponse(), exception);
    }
}
/**
 * A single page of Service Bus entities read from a feed.
 *
 * @param <T> The entity description from Service Bus.
 */
private static final class FeedPage<T> implements PagedResponse<T> {
    private final int statusCode;
    private final HttpHeaders header;
    private final HttpRequest request;
    private final IterableStream<T> entries;
    private final String continuationToken;

    /**
     * Creates the final page of a feed; there is no continuation token.
     *
     * @param entries Items in the page.
     */
    private FeedPage(int statusCode, HttpHeaders header, HttpRequest request, List<T> entries) {
        this(statusCode, header, request, entries, (String) null);
    }

    /**
     * Creates an instance that has additional pages to fetch.
     *
     * @param entries Items in the page.
     * @param skip Number of elements to "skip"; encoded as the continuation token.
     */
    private FeedPage(int statusCode, HttpHeaders header, HttpRequest request, List<T> entries, int skip) {
        this(statusCode, header, request, entries, String.valueOf(skip));
    }

    // Shared initialization for both public-facing constructors.
    private FeedPage(int statusCode, HttpHeaders header, HttpRequest request, List<T> entries,
        String continuationToken) {
        this.statusCode = statusCode;
        this.header = header;
        this.request = request;
        this.entries = new IterableStream<>(entries);
        this.continuationToken = continuationToken;
    }

    @Override
    public IterableStream<T> getElements() {
        return entries;
    }

    @Override
    public String getContinuationToken() {
        return continuationToken;
    }

    @Override
    public int getStatusCode() {
        return statusCode;
    }

    @Override
    public HttpHeaders getHeaders() {
        return header;
    }

    @Override
    public HttpRequest getRequest() {
        return request;
    }

    @Override
    public void close() {
        // Nothing to release; pages hold no closable resources.
    }
}
} | class ServiceBusManagementAsyncClient {
// Value stamped onto outgoing requests under AZ_TRACING_NAMESPACE_KEY.
private static final String SERVICE_BUS_TRACING_NAMESPACE_VALUE = "Microsoft.ServiceBus";
// ATOM entity payloads are submitted as XML.
private static final String CONTENT_TYPE = "application/xml";
// Entity-type path segments used when listing entities.
private static final String QUEUES_ENTITY_TYPE = "queues";
private static final String TOPICS_ENTITY_TYPE = "topics";
// Maximum number of entities requested per feed page.
private static final int NUMBER_OF_ELEMENTS = 100;
// Generated management client and its entity sub-client ("Entitys" spelling comes from codegen).
private final ServiceBusManagementClientImpl managementClient;
private final EntitysImpl entityClient;
private final ClientLogger logger = new ClientLogger(ServiceBusManagementAsyncClient.class);
// Serializer used to read/write the ATOM XML bodies.
private final ServiceBusManagementSerializer serializer;
/**
 * Creates a new instance with the given management client and serializer.
 *
 * @param managementClient Client to make management calls.
 * @param serializer Serializer to deserialize ATOM XML responses.
 * @throws NullPointerException if {@code managementClient} or {@code serializer} is null.
 */
ServiceBusManagementAsyncClient(ServiceBusManagementClientImpl managementClient,
    ServiceBusManagementSerializer serializer) {
    this.managementClient = Objects.requireNonNull(managementClient, "'managementClient' cannot be null.");
    // Consistency fix: 'serializer' was previously assigned without a null check while 'managementClient' was
    // validated; a null serializer only failed later inside deserialize(...) with an obscure NPE.
    this.serializer = Objects.requireNonNull(serializer, "'serializer' cannot be null.");
    this.entityClient = managementClient.getEntitys();
}
/**
 * Creates a queue with the given name and default properties.
 *
 * @param queueName Name of the queue to create.
 *
 * @return A Mono that completes with information about the created queue.
 * @throws ClientAuthenticationException if the client's credentials do not have access to modify the
 *     namespace.
 * @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
 *     occurred processing the request.
 * @throws NullPointerException if {@code queueName} is null.
 * @throws IllegalArgumentException if {@code queueName} is an empty string.
 * @throws ResourceExistsException if a queue exists with the same {@code queueName}.
 * @see <a href="https://docs.microsoft.com/rest/api/servicebus/create-queue">Create Queue</a>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueDescription> createQueue(String queueName) {
    try {
        // A description constructed from just the name carries service-default properties.
        final QueueDescription queue = new QueueDescription(queueName);
        return createQueue(queue);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Creates a queue with the {@link QueueDescription}.
 *
 * @param queue Information about the queue to create.
 *
 * @return A Mono that completes with information about the created queue.
 * @throws ClientAuthenticationException if the client's credentials do not have access to modify the
 * namespace.
 * @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
 * occurred processing the request.
 * @throws NullPointerException if {@code queue} is null.
 * @throws ResourceExistsException if a queue exists with the same {@link QueueDescription#getName()
 * queueName}.
 * @see <a href="https://docs.microsoft.com/rest/api/servicebus/create-queue">Create Queue</a>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueDescription> createQueue(QueueDescription queue) {
return createQueueWithResponse(queue).map(Response::getValue);
}
/**
 * Creates a queue and returns the created queue in addition to the HTTP response.
 *
 * @param queue The queue to create.
 *
 * @return A Mono that returns the created queue in addition to the HTTP response.
 * @throws ClientAuthenticationException if the client's credentials do not have access to modify the
 * namespace.
 * @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
 * occurred processing the request.
 * @throws NullPointerException if {@code queue} is null.
 * @throws ResourceExistsException if a queue exists with the same {@link QueueDescription#getName()
 * queueName}.
 * @see <a href="https://docs.microsoft.com/rest/api/servicebus/create-queue">Create Queue</a>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueueDescription>> createQueueWithResponse(QueueDescription queue) {
// withContext captures the subscriber's reactor Context and hands it to the Context-accepting overload.
return withContext(context -> createQueueWithResponse(queue, context));
}
/**
 * Creates a subscription with the given topic and subscription names.
 *
 * @param topicName Name of the topic associated with subscription.
 * @param subscriptionName Name of the subscription.
 *
 * @return A Mono that completes with information about the created subscription.
 * @throws ClientAuthenticationException if the client's credentials do not have access to modify the
 * namespace.
 * @throws HttpResponseException If the request body was invalid, the quota is exceeded, or an error occurred
 * processing the request.
 * @throws NullPointerException if {@code topicName} or {@code subscriptionName} are null.
 * @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} are empty strings.
 * @throws ResourceExistsException if a subscription exists with the same topic and subscription name.
 * @see <a href="https://docs.microsoft.com/rest/api/servicebus/create-subscription">Create Subscription</a>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionDescription> createSubscription(String topicName, String subscriptionName) {
try {
// A description constructed from just the names carries service-default properties.
return createSubscription(new SubscriptionDescription(topicName, subscriptionName));
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
 * Creates a subscription with the {@link SubscriptionDescription}.
 *
 * @param subscription Information about the subscription to create.
 *
 * @return A Mono that completes with information about the created subscription.
 * @throws ClientAuthenticationException if the client's credentials do not have access to modify the
 * namespace.
 * @throws HttpResponseException If the request body was invalid, the quota is exceeded, or an error occurred
 * processing the request.
 * @throws NullPointerException if {@code subscription} is null.
 * @throws ResourceExistsException if a subscription exists with the same topic and subscription name.
 * @see <a href="https://docs.microsoft.com/rest/api/servicebus/create-subscription">Create Subscription</a>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionDescription> createSubscription(SubscriptionDescription subscription) {
return createSubscriptionWithResponse(subscription).map(Response::getValue);
}
/**
 * Creates a subscription and returns the created subscription in addition to the HTTP response.
 *
 * @param subscription Information about the subscription to create.
 *
 * @return A Mono that returns the created subscription in addition to the HTTP response.
 * @throws ClientAuthenticationException if the client's credentials do not have access to modify the
 * namespace.
 * @throws HttpResponseException If the request body was invalid, the quota is exceeded, or an error occurred
 * processing the request.
 * @throws NullPointerException if {@code subscription} is null.
 * @throws ResourceExistsException if a subscription exists with the same topic and subscription name.
 * @see <a href="https://docs.microsoft.com/rest/api/servicebus/create-subscription">Create Subscription</a>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SubscriptionDescription>> createSubscriptionWithResponse(
SubscriptionDescription subscription) {
// withContext captures the subscriber's reactor Context and hands it to the Context-accepting overload.
return withContext(context -> createSubscriptionWithResponse(subscription, context));
}
/**
 * Creates a topic with the given name and default properties.
 *
 * @param topicName Name of the topic to create.
 *
 * @return A Mono that completes with information about the created topic.
 * @throws ClientAuthenticationException if the client's credentials do not have access to modify the
 *     namespace.
 * @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
 *     occurred processing the request.
 * @throws NullPointerException if {@code topicName} is null.
 * @throws IllegalArgumentException if {@code topicName} is an empty string.
 * @throws ResourceExistsException if a topic exists with the same {@code topicName}.
 * @see <a href="https://docs.microsoft.com/rest/api/servicebus/create-topic">Create Topic</a>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicDescription> createTopic(String topicName) {
    try {
        // A description constructed from just the name carries service-default properties.
        final TopicDescription topic = new TopicDescription(topicName);
        return createTopic(topic);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Creates a topic with the {@link TopicDescription}.
 *
 * @param topic Information about the topic to create.
 *
 * @return A Mono that completes with information about the created topic.
 * @throws ClientAuthenticationException if the client's credentials do not have access to modify the
 * namespace.
 * @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
 * occurred processing the request.
 * @throws IllegalArgumentException if {@link TopicDescription#getName() topic.getName()} is an empty
 * string.
 * @throws NullPointerException if {@code topic} is null.
 * @throws ResourceExistsException if a topic exists with the same {@link TopicDescription#getName()
 * topicName}.
 * @see <a href="https://docs.microsoft.com/rest/api/servicebus/create-topic">Create Topic</a>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicDescription> createTopic(TopicDescription topic) {
return createTopicWithResponse(topic).map(Response::getValue);
}
/**
* Creates a topic and returns the created topic in addition to the HTTP response.
*
* @param topic The topic to create.
*
* @return A Mono that returns the created topic in addition to the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link TopicDescription
* string.
* @throws NullPointerException if {@code topic} is null.
* @throws ResourceExistsException if a topic exists with the same {@link TopicDescription
* topicName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<TopicDescription>> createTopicWithResponse(TopicDescription topic) {
    // Capture the caller's reactive context, then defer to the Context-aware overload.
    return withContext(ctx -> createTopicWithResponse(topic, ctx));
}
/**
* Deletes a queue the matching {@code queueName}.
*
* @param queueName Name of queue to delete.
*
* @return A Mono that completes when the queue is deleted.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws NullPointerException if {@code queueName} is null.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteQueue(String queueName) {
    // Reuse the Response-returning overload and discard the HTTP response envelope.
    final Mono<Response<Void>> deletion = deleteQueueWithResponse(queueName);
    return deletion.then();
}
/**
* Deletes a queue the matching {@code queueName} and returns the HTTP response.
*
* @param queueName Name of queue to delete.
*
* @return A Mono that completes when the queue is deleted and returns the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws NullPointerException if {@code queueName} is null.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> deleteQueueWithResponse(String queueName) {
    // Capture the caller's reactive context, then defer to the Context-aware overload.
    return withContext(ctx -> deleteQueueWithResponse(queueName, ctx));
}
/**
* Deletes a subscription the matching {@code subscriptionName}.
*
* @param topicName Name of topic associated with subscription to delete.
* @param subscriptionName Name of subscription to delete.
*
* @return A Mono that completes when the subscription is deleted.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} is an empty string.
* @throws NullPointerException if {@code topicName} or {@code subscriptionName} is null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteSubscription(String topicName, String subscriptionName) {
    // Reuse the Response-returning overload and discard the HTTP response envelope.
    final Mono<Response<Void>> deletion = deleteSubscriptionWithResponse(topicName, subscriptionName);
    return deletion.then();
}
/**
* Deletes a subscription the matching {@code subscriptionName} and returns the HTTP response.
*
* @param topicName Name of topic associated with subscription to delete.
* @param subscriptionName Name of subscription to delete.
*
* @return A Mono that completes when the subscription is deleted and returns the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} is an empty string.
* @throws NullPointerException if {@code topicName} or {@code subscriptionName} is null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> deleteSubscriptionWithResponse(String topicName, String subscriptionName) {
    // Capture the caller's reactive context, then defer to the Context-aware overload.
    return withContext(ctx -> deleteSubscriptionWithResponse(topicName, subscriptionName, ctx));
}
/**
* Deletes a topic the matching {@code topicName}.
*
* @param topicName Name of topic to delete.
*
* @return A Mono that completes when the topic is deleted.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteTopic(String topicName) {
    // Reuse the Response-returning overload and discard the HTTP response envelope.
    final Mono<Response<Void>> deletion = deleteTopicWithResponse(topicName);
    return deletion.then();
}
/**
* Deletes a topic the matching {@code topicName} and returns the HTTP response.
*
* @param topicName Name of topic to delete.
*
* @return A Mono that completes when the topic is deleted and returns the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> deleteTopicWithResponse(String topicName) {
    // Capture the caller's reactive context, then defer to the Context-aware overload.
    return withContext(ctx -> deleteTopicWithResponse(topicName, ctx));
}
/**
* Gets information about the queue.
*
* @param queueName Name of queue to get information about.
*
* @return A Mono that completes with information about the queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueDescription> getQueue(String queueName) {
    // Unwrap the HTTP response body from the Response-returning variant.
    final Mono<Response<QueueDescription>> response = getQueueWithResponse(queueName);
    return response.map(Response::getValue);
}
/**
* Gets information about the queue along with its HTTP response.
*
* @param queueName Name of queue to get information about.
*
* @return A Mono that completes with information about the queue and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueueDescription>> getQueueWithResponse(String queueName) {
    // Identity mapper: return the queue description exactly as deserialized.
    return withContext(ctx -> getQueueWithResponse(queueName, ctx, Function.identity()));
}
/**
* Gets runtime information about the queue.
*
* @param queueName Name of queue to get information about.
*
* @return A Mono that completes with runtime information about the queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueRuntimeInfo> getQueueRuntimeInfo(String queueName) {
    // Method reference replaces `response -> response.getValue()` for consistency
    // with the other convenience overloads in this class (e.g. getQueue).
    return getQueueRuntimeInfoWithResponse(queueName).map(Response::getValue);
}
/**
* Gets runtime information about the queue along with its HTTP response.
*
* @param queueName Name of queue to get information about.
*
* @return A Mono that completes with runtime information about the queue and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code queueName} is an empty string.
* @throws NullPointerException if {@code queueName} is null.
* @throws ResourceNotFoundException if the {@code queueName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueueRuntimeInfo>> getQueueRuntimeInfoWithResponse(String queueName) {
    // Same fetch as getQueueWithResponse, but projects into QueueRuntimeInfo.
    return withContext(ctx -> getQueueWithResponse(queueName, ctx, QueueRuntimeInfo::new));
}
/**
* Gets information about the queue.
*
* @param topicName Name of topic associated with subscription.
* @param subscriptionName Name of subscription to get information about.
*
* @return A Mono that completes with information about the subscription.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} are empty strings.
* @throws NullPointerException if {@code topicName} or {@code subscriptionName} are null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist in the {@code topicName}.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionDescription> getSubscription(String topicName, String subscriptionName) {
    // Unwrap the HTTP response body from the Response-returning variant.
    final Mono<Response<SubscriptionDescription>> response =
        getSubscriptionWithResponse(topicName, subscriptionName);
    return response.map(Response::getValue);
}
/**
* Gets information about the subscription along with its HTTP response.
*
* @param topicName Name of topic associated with subscription.
* @param subscriptionName Name of subscription to get information about.
*
* @return A Mono that completes with information about the subscription and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} are empty strings.
* @throws NullPointerException if {@code topicName} or {@code subscriptionName} are null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SubscriptionDescription>> getSubscriptionWithResponse(String topicName,
    String subscriptionName) {
    // Identity mapper: return the subscription description exactly as deserialized.
    return withContext(
        ctx -> getSubscriptionWithResponse(topicName, subscriptionName, ctx, Function.identity()));
}
/**
* Gets runtime information about the subscription.
*
* @param topicName Name of topic associated with subscription.
* @param subscriptionName Name of subscription to get information about.
*
* @return A Mono that completes with runtime information about the subscription.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} or {@code subscriptionName} are empty strings.
* @throws NullPointerException if {@code topicName} or {@code subscriptionName} are null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionRuntimeInfo> getSubscriptionRuntimeInfo(String topicName, String subscriptionName) {
    // Method reference replaces `response -> response.getValue()` for consistency
    // with the other convenience overloads in this class (e.g. getSubscription).
    return getSubscriptionRuntimeInfoWithResponse(topicName, subscriptionName)
        .map(Response::getValue);
}
/**
* Gets runtime information about the subscription.
*
* @param topicName Name of topic associated with subscription.
* @param subscriptionName Name of subscription to get information about.
*
* @return A Mono that completes with runtime information about the subscription.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code subscriptionName} is an empty string.
* @throws NullPointerException if {@code subscriptionName} is null.
* @throws ResourceNotFoundException if the {@code subscriptionName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SubscriptionRuntimeInfo>> getSubscriptionRuntimeInfoWithResponse(String topicName,
    String subscriptionName) {
    // Same fetch as getSubscriptionWithResponse, but projects into SubscriptionRuntimeInfo.
    return withContext(
        ctx -> getSubscriptionWithResponse(topicName, subscriptionName, ctx, SubscriptionRuntimeInfo::new));
}
/**
* Gets information about the topic.
*
* @param topicName Name of topic to get information about.
*
* @return A Mono that completes with information about the topic.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicDescription> getTopic(String topicName) {
    // Unwrap the HTTP response body from the Response-returning variant.
    final Mono<Response<TopicDescription>> response = getTopicWithResponse(topicName);
    return response.map(Response::getValue);
}
/**
* Gets information about the topic along with its HTTP response.
*
* @param topicName Name of topic to get information about.
*
* @return A Mono that completes with information about the topic and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<TopicDescription>> getTopicWithResponse(String topicName) {
    // Identity mapper: return the topic description exactly as deserialized.
    return withContext(ctx -> getTopicWithResponse(topicName, ctx, Function.identity()));
}
/**
* Gets runtime information about the topic.
*
* @param topicName Name of topic to get information about.
*
* @return A Mono that completes with runtime information about the topic.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicRuntimeInfo> getTopicRuntimeInfo(String topicName) {
    // Method reference replaces `response -> response.getValue()` for consistency
    // with the other convenience overloads in this class (e.g. getTopic).
    return getTopicRuntimeInfoWithResponse(topicName).map(Response::getValue);
}
/**
* Gets runtime information about the topic with its HTTP response.
*
* @param topicName Name of topic to get information about.
*
* @return A Mono that completes with runtime information about the topic and the associated HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If error occurred processing the request.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @throws NullPointerException if {@code topicName} is null.
* @throws ResourceNotFoundException if the {@code topicName} does not exist.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<TopicRuntimeInfo>> getTopicRuntimeInfoWithResponse(String topicName) {
    // Same fetch as getTopicWithResponse, but projects into TopicRuntimeInfo.
    return withContext(ctx -> getTopicWithResponse(topicName, ctx, TopicRuntimeInfo::new));
}
/**
* Fetches all the queues in the Service Bus namespace.
*
* @return A Flux of {@link QueueDescription queues} in the Service Bus namespace.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @see <a href="https:
* authorization rules</a>
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<QueueDescription> listQueues() {
    // Method reference replaces the redundant `context -> listQueuesFirstPage(context)` lambda.
    return new PagedFlux<>(
        () -> withContext(this::listQueuesFirstPage),
        token -> withContext(context -> listQueuesNextPage(token, context)));
}
/**
* Fetches all the subscriptions for a topic.
*
* @param topicName The topic name under which all the subscriptions need to be retrieved.
*
* @return A Flux of {@link SubscriptionDescription subscriptions} for the {@code topicName}.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws NullPointerException if {@code topicName} is null.
* @throws IllegalArgumentException if {@code topicName} is an empty string.
* @see <a href="https:
* authorization rules</a>
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<SubscriptionDescription> listSubscriptions(String topicName) {
    // Validate eagerly so bad arguments surface as a failing PagedFlux rather than a thrown exception.
    if (topicName == null) {
        return pagedFluxError(logger, new NullPointerException("'topicName' cannot be null."));
    }
    if (topicName.isEmpty()) {
        return pagedFluxError(logger, new IllegalArgumentException("'topicName' cannot be an empty string."));
    }
    return new PagedFlux<>(
        () -> withContext(ctx -> listSubscriptionsFirstPage(topicName, ctx)),
        continuationToken -> withContext(
            ctx -> listSubscriptionsNextPage(topicName, continuationToken, ctx)));
}
/**
* Fetches all the topics in the Service Bus namespace.
*
* @return A Flux of {@link TopicDescription topics} in the Service Bus namespace.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @see <a href="https:
* authorization rules</a>
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<TopicDescription> listTopics() {
    // Method reference replaces the redundant `context -> listTopicsFirstPage(context)` lambda.
    return new PagedFlux<>(
        () -> withContext(this::listTopicsFirstPage),
        token -> withContext(context -> listTopicsNextPage(token, context)));
}
/**
* Updates a queue with the given {@link QueueDescription}. The {@link QueueDescription} must be fully populated as
* all of the properties are replaced. If a property is not set the service default value is used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. More information can be found in the links below. They are:
* <ul>
* <li>{@link QueueDescription
* <li>{@link QueueDescription
* <li>{@link QueueDescription
* </li>
* <li>{@link QueueDescription
* </ul>
*
* @param queue Information about the queue to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that completes with the updated queue.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
* occurred processing the request.
* @throws NullPointerException if {@code queue} is null.
* @see <a href="https:
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueueDescription> updateQueue(QueueDescription queue) {
    // Unwrap the HTTP response body from the Response-returning variant.
    final Mono<Response<QueueDescription>> response = updateQueueWithResponse(queue);
    return response.map(Response::getValue);
}
/**
* Updates a queue with the given {@link QueueDescription}. The {@link QueueDescription} must be fully populated as
* all of the properties are replaced. If a property is not set the service default value is used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. More information can be found in the links below. They are:
* <ul>
* <li>{@link QueueDescription
* <li>{@link QueueDescription
* <li>{@link QueueDescription
* </li>
* <li>{@link QueueDescription
* </ul>
*
* @param queue Information about the queue to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that returns the updated queue in addition to the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the queue quota is exceeded, or an error
* occurred processing the request.
* @throws NullPointerException if {@code queue} is null.
* @see <a href="https:
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueueDescription>> updateQueueWithResponse(QueueDescription queue) {
    // Capture the caller's reactive context, then defer to the Context-aware overload.
    return withContext(ctx -> updateQueueWithResponse(queue, ctx));
}
/**
* Updates a subscription with the given {@link SubscriptionDescription}. The {@link SubscriptionDescription} must
* be fully populated as all of the properties are replaced. If a property is not set the service default value is
* used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. More information can be found in the links below. They are:
* <ul>
* <li>{@link SubscriptionDescription
* <li>{@link SubscriptionDescription
* <li>{@link SubscriptionDescription
* </ul>
*
* @param subscription Information about the subscription to update. You must provide all the property values
* that are desired on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that returns the updated subscription.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the subscription quota is exceeded, or an
* error occurred processing the request.
* @throws IllegalArgumentException if {@link SubscriptionDescription
* SubscriptionDescription
* @throws NullPointerException if {@code subscription} is null.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SubscriptionDescription> updateSubscription(SubscriptionDescription subscription) {
    // Unwrap the HTTP response body from the Response-returning variant.
    final Mono<Response<SubscriptionDescription>> response = updateSubscriptionWithResponse(subscription);
    return response.map(Response::getValue);
}
/**
* Updates a subscription with the given {@link SubscriptionDescription}. The {@link SubscriptionDescription} must
* be fully populated as all of the properties are replaced. If a property is not set the service default value is
* used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. More information can be found in the links below. They are:
* <ul>
* <li>{@link SubscriptionDescription
* <li>{@link SubscriptionDescription
* <li>{@link SubscriptionDescription
* </ul>
*
* @param subscription Information about the subscription to update. You must provide all the property values
* that are desired on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that returns the updated subscription in addition to the HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the subscription quota is exceeded, or an
* error occurred processing the request.
* @throws IllegalArgumentException if {@link SubscriptionDescription
* SubscriptionDescription
* @throws NullPointerException if {@code subscription} is null.
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SubscriptionDescription>> updateSubscriptionWithResponse(
    SubscriptionDescription subscription) {
    // Capture the caller's reactive context, then defer to the Context-aware overload.
    return withContext(ctx -> updateSubscriptionWithResponse(subscription, ctx));
}
/**
* Updates a topic with the given {@link TopicDescription}. The {@link TopicDescription} must be fully populated as
* all of the properties are replaced. If a property is not set the service default value is used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. More information can be found in the links below. They are:
* <ul>
* <li>{@link TopicDescription
* <li>{@link TopicDescription
* </li>
* </ul>
*
* @param topic Information about the topic to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that completes with the updated topic.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link TopicDescription
* string.
* @throws NullPointerException if {@code topic} is null.
* @see <a href="https:
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<TopicDescription> updateTopic(TopicDescription topic) {
    // Unwrap the HTTP response body from the Response-returning variant.
    final Mono<Response<TopicDescription>> response = updateTopicWithResponse(topic);
    return response.map(Response::getValue);
}
/**
* Updates a topic with the given {@link TopicDescription}. The {@link TopicDescription} must be fully populated as
* all of the properties are replaced. If a property is not set the service default value is used.
*
* The suggested flow is:
* <ol>
* <li>{@link
* <li>Update the required elements.</li>
* <li>Pass the updated description into this method.</li>
* </ol>
*
* <p>
* There are a subset of properties that can be updated. More information can be found in the links below. They are:
* <ul>
* <li>{@link TopicDescription
* <li>{@link TopicDescription
* </li>
* </ul>
*
* @param topic Information about the topic to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
*
* @return A Mono that completes with the updated topic and its HTTP response.
* @throws ClientAuthenticationException if the client's credentials do not have access to modify the
* namespace.
* @throws HttpResponseException If the request body was invalid, the topic quota is exceeded, or an error
* occurred processing the request.
* @throws IllegalArgumentException if {@link TopicDescription
* string.
* @throws NullPointerException if {@code topic} is null.
* @see <a href="https:
* @see <a href="https:
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<TopicDescription>> updateTopicWithResponse(TopicDescription topic) {
    // Capture the caller's reactive context, then defer to the Context-aware overload.
    return withContext(ctx -> updateTopicWithResponse(topic, ctx));
}
/**
* Creates a queue with its context.
*
* @param queue Queue to create.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link QueueDescription}.
*/
Mono<Response<QueueDescription>> createQueueWithResponse(QueueDescription queue, Context context) {
if (queue == null) {
return monoError(logger, new NullPointerException("'queue' cannot be null"));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final CreateQueueBodyContent content = new CreateQueueBodyContent()
.setType(CONTENT_TYPE)
.setQueueDescription(queue);
final CreateQueueBody createEntity = new CreateQueueBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.putWithResponseAsync(queue.getName(), createEntity, null, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(this::deserializeQueue);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a subscription with its context.
*
* @param subscription Subscription to create.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link SubscriptionDescription}.
*/
Mono<Response<SubscriptionDescription>> createSubscriptionWithResponse(SubscriptionDescription subscription,
Context context) {
if (subscription == null) {
return monoError(logger, new NullPointerException("'subscription' cannot be null."));
}
final CreateSubscriptionBodyContent content = new CreateSubscriptionBodyContent()
.setType(CONTENT_TYPE)
.setSubscriptionDescription(subscription);
final CreateSubscriptionBody createEntity = new CreateSubscriptionBody().setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return managementClient.getSubscriptions().putWithResponseAsync(subscription.getTopicName(),
subscription.getSubscriptionName(), createEntity, null, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> deserializeSubscription(subscription.getTopicName(), response));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a topic with its context.
*
* @param topic Topic to create.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link TopicDescription}.
*/
Mono<Response<TopicDescription>> createTopicWithResponse(TopicDescription topic, Context context) {
if (topic == null) {
final CreateTopicBodyContent content = new CreateTopicBodyContent()
.setType(CONTENT_TYPE)
.setTopicDescription(topic);
final CreateTopicBody createEntity = new CreateTopicBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.putWithResponseAsync(topic.getName(), createEntity, null, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(this::deserializeTopic);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Deletes a queue with its context.
*
* @param queueName Name of queue to delete.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link QueueDescription}.
*/
Mono<Response<Void>> deleteQueueWithResponse(String queueName, Context context) {
if (queueName == null) {
return monoError(logger, new NullPointerException("'queueName' cannot be null"));
} else if (queueName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'queueName' cannot be an empty string."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.deleteWithResponseAsync(queueName, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> {
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), null);
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Deletes a subscription with its context.
*
* @param topicName Name of topic associated with subscription to delete.
* @param subscriptionName Name of subscription to delete.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link SubscriptionDescription}.
*/
Mono<Response<Void>> deleteSubscriptionWithResponse(String topicName, String subscriptionName, Context context) {
if (subscriptionName == null) {
return monoError(logger, new NullPointerException("'subscriptionName' cannot be null"));
} else if (subscriptionName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'subscriptionName' cannot be an empty string."));
} else if (topicName == null) {
return monoError(logger, new NullPointerException("'topicName' cannot be null"));
} else if (topicName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'topicName' cannot be an empty string."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return managementClient.getSubscriptions().deleteWithResponseAsync(topicName, subscriptionName,
withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), null));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Deletes a topic with its context.
*
* @param topicName Name of topic to delete.
* @param context Context to pass into request.
*
* @return A Mono that completes with the created {@link TopicDescription}.
*/
Mono<Response<Void>> deleteTopicWithResponse(String topicName, Context context) {
if (topicName == null) {
return monoError(logger, new NullPointerException("'topicName' cannot be null"));
} else if (topicName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'topicName' cannot be an empty string."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.deleteWithResponseAsync(topicName, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), null));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Gets a queue with its context.
*
* @param queueName Name of queue to fetch information for.
* @param context Context to pass into request.
*
* @return A Mono that completes with the {@link QueueDescription}.
*/
<T> Mono<Response<T>> getQueueWithResponse(String queueName, Context context,
Function<QueueDescription, T> mapper) {
if (queueName == null) {
return monoError(logger, new NullPointerException("'queueName' cannot be null"));
} else if (queueName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'queueName' cannot be empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.getWithResponseAsync(queueName, true, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.handle((response, sink) -> {
final Response<QueueDescription> deserialize = deserializeQueue(response);
if (deserialize.getValue() == null) {
final HttpResponse notFoundResponse = new EntityNotFoundHttpResponse<>(deserialize);
sink.error(new ResourceNotFoundException(String.format("Queue '%s' does not exist.", queueName),
notFoundResponse));
} else {
final T mapped = mapper.apply(deserialize.getValue());
sink.next(new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), mapped));
}
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Gets a subscription with its context.
*
* @param topicName Name of the topic associated with the subscription.
* @param subscriptionName Name of subscription to fetch information for.
* @param context Context to pass into request.
*
* @return A Mono that completes with the {@link SubscriptionDescription}.
*/
<T> Mono<Response<T>> getSubscriptionWithResponse(String topicName, String subscriptionName, Context context,
Function<SubscriptionDescription, T> mapper) {
if (topicName == null) {
return monoError(logger, new NullPointerException("'topicName' cannot be null."));
} else if (topicName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'topicName' cannot be an empty string."));
} else if (subscriptionName == null) {
return monoError(logger, new NullPointerException("'subscriptionName' cannot be null."));
} else if (subscriptionName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'subscriptionName' cannot be an empty string."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return managementClient.getSubscriptions().getWithResponseAsync(topicName, subscriptionName, true,
withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.handle((response, sink) -> {
final Response<SubscriptionDescription> deserialize = deserializeSubscription(topicName, response);
if (deserialize.getValue() == null) {
final HttpResponse notFoundResponse = new EntityNotFoundHttpResponse<>(deserialize);
sink.error(new ResourceNotFoundException(String.format(
"Subscription '%s' in topic '%s' does not exist.", topicName, subscriptionName),
notFoundResponse));
} else {
final T mapped = mapper.apply(deserialize.getValue());
sink.next(new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), mapped));
}
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Gets a topic with its context.
*
* @param topicName Name of topic to fetch information for.
* @param context Context to pass into request.
*
* @return A Mono that completes with the {@link TopicDescription}.
*/
<T> Mono<Response<T>> getTopicWithResponse(String topicName, Context context,
Function<TopicDescription, T> mapper) {
if (topicName == null) {
return monoError(logger, new NullPointerException("'topicName' cannot be null"));
} else if (topicName.isEmpty()) {
return monoError(logger, new IllegalArgumentException("'topicName' cannot be empty."));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.getWithResponseAsync(topicName, true, withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.handle((response, sink) -> {
final Response<TopicDescription> deserialize = deserializeTopic(response);
if (deserialize.getValue() == null) {
final HttpResponse notFoundResponse = new EntityNotFoundHttpResponse<>(deserialize);
sink.error(new ResourceNotFoundException(String.format("Topic '%s' does not exist.", topicName),
notFoundResponse));
} else {
final T mapped = mapper.apply(deserialize.getValue());
sink.next(new SimpleResponse<>(response.getRequest(), response.getStatusCode(),
response.getHeaders(), mapped));
}
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Gets the first page of queues with context.
*
* @param context Context to pass into request.
*
* @return A Mono that completes with a page of queues.
*/
Mono<PagedResponse<QueueDescription>> listQueuesFirstPage(Context context) {
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return listQueues(0, withTracing);
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Gets the next page of queues with context.
*
* @param continuationToken Number of items to skip in feed.
* @param context Context to pass into request.
*
* @return A Mono that completes with a page of queues or empty if there are no items left.
*/
Mono<PagedResponse<QueueDescription>> listQueuesNextPage(String continuationToken, Context context) {
if (continuationToken == null || continuationToken.isEmpty()) {
return Mono.empty();
}
try {
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
final int skip = Integer.parseInt(continuationToken);
return listQueues(skip, withTracing);
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Gets the first page of subscriptions with context.
*
* @param context Context to pass into request.
*
* @return A Mono that completes with a page of subscriptions.
*/
Mono<PagedResponse<SubscriptionDescription>> listSubscriptionsFirstPage(String topicName, Context context) {
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return listSubscriptions(topicName, 0, withTracing);
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Gets the next page of subscriptions with context.
*
* @param continuationToken Number of items to skip in feed.
* @param context Context to pass into request.
*
* @return A Mono that completes with a page of subscriptions or empty if there are no items left.
*/
Mono<PagedResponse<SubscriptionDescription>> listSubscriptionsNextPage(String topicName, String continuationToken,
Context context) {
if (continuationToken == null || continuationToken.isEmpty()) {
return Mono.empty();
}
try {
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
final int skip = Integer.parseInt(continuationToken);
return listSubscriptions(topicName, skip, withTracing);
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Gets the first page of topics with context.
*
* @param context Context to pass into request.
*
* @return A Mono that completes with a page of topics.
*/
Mono<PagedResponse<TopicDescription>> listTopicsFirstPage(Context context) {
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return listTopics(0, withTracing);
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Gets the next page of topics with context.
*
* @param continuationToken Number of items to skip in feed.
* @param context Context to pass into request.
*
* @return A Mono that completes with a page of topics or empty if there are no items left.
*/
Mono<PagedResponse<TopicDescription>> listTopicsNextPage(String continuationToken, Context context) {
if (continuationToken == null || continuationToken.isEmpty()) {
return Mono.empty();
}
try {
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
final int skip = Integer.parseInt(continuationToken);
return listTopics(skip, withTracing);
} catch (RuntimeException e) {
return monoError(logger, e);
}
}
/**
* Updates a queue with its context.
*
* @param queue Information about the queue to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
* @param context Context to pass into request.
*
* @return A Mono that completes with the updated {@link QueueDescription}.
*/
Mono<Response<QueueDescription>> updateQueueWithResponse(QueueDescription queue, Context context) {
if (queue == null) {
return monoError(logger, new NullPointerException("'queue' cannot be null"));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final CreateQueueBodyContent content = new CreateQueueBodyContent()
.setType(CONTENT_TYPE)
.setQueueDescription(queue);
final CreateQueueBody createEntity = new CreateQueueBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.putWithResponseAsync(queue.getName(), createEntity, "*", withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> deserializeQueue(response));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Updates a subscription with its context.
*
* @param subscription Information about the subscription to update. You must provide all the property values
* that are desired on the updated entity. Any values not provided are set to the service default values.
* @param context Context to pass into request.
*
* @return A Mono that completes with the updated {@link SubscriptionDescription}.
*/
Mono<Response<SubscriptionDescription>> updateSubscriptionWithResponse(SubscriptionDescription subscription,
Context context) {
if (subscription == null) {
return monoError(logger, new NullPointerException("'subscription' cannot be null"));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final String topicName = subscription.getTopicName();
final String subscriptionName = subscription.getSubscriptionName();
final CreateSubscriptionBodyContent content = new CreateSubscriptionBodyContent()
.setType(CONTENT_TYPE)
.setSubscriptionDescription(subscription);
final CreateSubscriptionBody createEntity = new CreateSubscriptionBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return managementClient.getSubscriptions().putWithResponseAsync(topicName, subscriptionName, createEntity,
"*", withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> deserializeSubscription(topicName, response));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Updates a topic with its context.
*
* @param topic Information about the topic to update. You must provide all the property values that are desired
* on the updated entity. Any values not provided are set to the service default values.
* @param context Context to pass into request.
*
* @return A Mono that completes with the updated {@link TopicDescription}.
*/
Mono<Response<TopicDescription>> updateTopicWithResponse(TopicDescription topic, Context context) {
if (topic == null) {
return monoError(logger, new NullPointerException("'topic' cannot be null"));
} else if (context == null) {
return monoError(logger, new NullPointerException("'context' cannot be null."));
}
final CreateTopicBodyContent content = new CreateTopicBodyContent()
.setType(CONTENT_TYPE)
.setTopicDescription(topic);
final CreateTopicBody createEntity = new CreateTopicBody()
.setContent(content);
final Context withTracing = context.addData(AZ_TRACING_NAMESPACE_KEY, SERVICE_BUS_TRACING_NAMESPACE_VALUE);
try {
return entityClient.putWithResponseAsync(topic.getName(), createEntity, "*", withTracing)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.map(response -> deserializeTopic(response));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
private <T> T deserialize(Object object, Class<T> clazz) {
if (object == null) {
return null;
}
final String contents = String.valueOf(object);
if (contents.isEmpty()) {
return null;
}
try {
return serializer.deserialize(contents, clazz);
} catch (IOException e) {
throw logger.logExceptionAsError(new RuntimeException(String.format(
"Exception while deserializing. Body: [%s]. Class: %s", contents, clazz), e));
}
}
/**
* Given an HTTP response, will deserialize it into a strongly typed Response object.
*
* @param response HTTP response to deserialize response body from.
* @param clazz Class to deserialize response type into.
* @param <T> Class type to deserialize response into.
*
* @return A Response with a strongly typed response value.
*/
private <T> Response<T> deserialize(Response<Object> response, Class<T> clazz) {
final T deserialize = deserialize(response.getValue(), clazz);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
deserialize);
}
/**
* Converts a Response into its corresponding {@link QueueDescriptionEntry} then mapped into {@link
* QueueDescription}.
*
* @param response HTTP Response to deserialize.
*
* @return The corresponding HTTP response with convenience properties set.
*/
private Response<QueueDescription> deserializeQueue(Response<Object> response) {
final QueueDescriptionEntry entry = deserialize(response.getValue(), QueueDescriptionEntry.class);
if (entry == null) {
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
} else if (entry.getContent() == null) {
logger.info("entry.getContent() is null. There should have been content returned. Entry: {}", entry);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
}
final QueueDescription result = entry.getContent().getQueueDescription();
final String queueName = getTitleValue(entry.getTitle());
EntityHelper.setQueueName(result, queueName);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), result);
}
/**
* Converts a Response into its corresponding {@link SubscriptionDescriptionEntry} then mapped into {@link
* SubscriptionDescription}.
*
* @param response HTTP Response to deserialize.
*
* @return The corresponding HTTP response with convenience properties set.
*/
private Response<SubscriptionDescription> deserializeSubscription(String topicName, Response<Object> response) {
final SubscriptionDescriptionEntry entry = deserialize(response.getValue(), SubscriptionDescriptionEntry.class);
if (entry == null) {
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
} else if (entry.getContent() == null) {
logger.warning("entry.getContent() is null. There should have been content returned. Entry: {}", entry);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
}
final SubscriptionDescription subscription = entry.getContent().getSubscriptionDescription();
final String subscriptionName = getTitleValue(entry.getTitle());
EntityHelper.setSubscriptionName(subscription, subscriptionName);
EntityHelper.setTopicName(subscription, topicName);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
subscription);
}
/**
* Converts a Response into its corresponding {@link TopicDescriptionEntry} then mapped into {@link
* QueueDescription}.
*
* @param response HTTP Response to deserialize.
*
* @return The corresponding HTTP response with convenience properties set.
*/
private Response<TopicDescription> deserializeTopic(Response<Object> response) {
final TopicDescriptionEntry entry = deserialize(response.getValue(), TopicDescriptionEntry.class);
if (entry == null) {
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
} else if (entry.getContent() == null) {
logger.warning("entry.getContent() is null. There should have been content returned. Entry: {}", entry);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
}
final TopicDescription result = entry.getContent().getTopicDescription();
final String queueName = getTitleValue(entry.getTitle());
EntityHelper.setTopicName(result, queueName);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), result);
}
/**
* Creates a {@link FeedPage} given the elements and a set of response links to get the next link from.
*
* @param entities Entities in the feed.
* @param responseLinks Links returned from the feed.
* @param <TResult> Type of Service Bus entities in page.
*
* @return A {@link FeedPage} indicating whether this can be continued or not.
* @throws MalformedURLException if the "next" page link does not contain a well-formed URL.
*/
private <TResult, TFeed> FeedPage<TResult> extractPage(Response<TFeed> response, List<TResult> entities,
List<ResponseLink> responseLinks)
throws MalformedURLException, UnsupportedEncodingException {
final Optional<ResponseLink> nextLink = responseLinks.stream()
.filter(link -> link.getRel().equalsIgnoreCase("next"))
.findFirst();
if (!nextLink.isPresent()) {
return new FeedPage<>(response.getStatusCode(), response.getHeaders(), response.getRequest(), entities);
}
final URL url = new URL(nextLink.get().getHref());
final String decode = URLDecoder.decode(url.getQuery(), StandardCharsets.UTF_8.name());
final Optional<Integer> skipParameter = Arrays.stream(decode.split("&|&"))
.map(part -> part.split("=", 2))
.filter(parts -> parts[0].equalsIgnoreCase("$skip") && parts.length == 2)
.map(parts -> Integer.valueOf(parts[1]))
.findFirst();
if (skipParameter.isPresent()) {
return new FeedPage<>(response.getStatusCode(), response.getHeaders(), response.getRequest(), entities,
skipParameter.get());
} else {
logger.warning("There should have been a skip parameter for the next page.");
return new FeedPage<>(response.getStatusCode(), response.getHeaders(), response.getRequest(), entities);
}
}
/**
* Helper method that invokes the service method, extracts the data and translates it to a PagedResponse.
*
* @param skip Number of elements to skip.
* @param context Context for the query.
*
* @return A Mono that completes with a paged response of queues.
*/
private Mono<PagedResponse<QueueDescription>> listQueues(int skip, Context context) {
return managementClient.listEntitiesWithResponseAsync(QUEUES_ENTITY_TYPE, skip, NUMBER_OF_ELEMENTS, context)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.flatMap(response -> {
final Response<QueueDescriptionFeed> feedResponse = deserialize(response, QueueDescriptionFeed.class);
final QueueDescriptionFeed feed = feedResponse.getValue();
if (feed == null) {
logger.warning("Could not deserialize QueueDescriptionFeed. skip {}, top: {}", skip,
NUMBER_OF_ELEMENTS);
return Mono.empty();
}
final List<QueueDescription> entities = feed.getEntry().stream()
.filter(e -> e.getContent() != null && e.getContent().getQueueDescription() != null)
.map(e -> {
final String queueName = getTitleValue(e.getTitle());
final QueueDescription queueDescription = e.getContent().getQueueDescription();
EntityHelper.setQueueName(queueDescription, queueName);
return queueDescription;
})
.collect(Collectors.toList());
try {
return Mono.just(extractPage(feedResponse, entities, feed.getLink()));
} catch (MalformedURLException | UnsupportedEncodingException error) {
return Mono.error(new RuntimeException("Could not parse response into FeedPage<QueueDescription>",
error));
}
});
}
/**
* Helper method that invokes the service method, extracts the data and translates it to a PagedResponse.
*
* @param skip Number of elements to skip.
* @param context Context for the query.
*
* @return A Mono that completes with a paged response of subscriptions.
*/
private Mono<PagedResponse<SubscriptionDescription>> listSubscriptions(String topicName, int skip,
Context context) {
return managementClient.listSubscriptionsWithResponseAsync(topicName, skip, NUMBER_OF_ELEMENTS, context)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.flatMap(response -> {
final Response<SubscriptionDescriptionFeed> feedResponse = deserialize(response,
SubscriptionDescriptionFeed.class);
final SubscriptionDescriptionFeed feed = feedResponse.getValue();
if (feed == null) {
logger.warning("Could not deserialize SubscriptionDescriptionFeed. skip {}, top: {}", skip,
NUMBER_OF_ELEMENTS);
return Mono.empty();
}
final List<SubscriptionDescription> entities = feed.getEntry().stream()
.filter(e -> e.getContent() != null && e.getContent().getSubscriptionDescription() != null)
.map(e -> {
final String subscriptionName = getTitleValue(e.getTitle());
final SubscriptionDescription description = e.getContent().getSubscriptionDescription();
EntityHelper.setTopicName(description, topicName);
EntityHelper.setSubscriptionName(description, subscriptionName);
return description;
})
.collect(Collectors.toList());
try {
return Mono.just(extractPage(feedResponse, entities, feed.getLink()));
} catch (MalformedURLException | UnsupportedEncodingException error) {
return Mono.error(new RuntimeException(
"Could not parse response into FeedPage<SubscriptionDescription>", error));
}
});
}
/**
* Helper method that invokes the service method, extracts the data and translates it to a PagedResponse.
*
* @param skip Number of elements to skip.
* @param context Context for the query.
*
* @return A Mono that completes with a paged response of topics.
*/
private Mono<PagedResponse<TopicDescription>> listTopics(int skip, Context context) {
return managementClient.listEntitiesWithResponseAsync(TOPICS_ENTITY_TYPE, skip, NUMBER_OF_ELEMENTS, context)
.onErrorMap(ServiceBusManagementAsyncClient::mapException)
.flatMap(response -> {
final Response<TopicDescriptionFeed> feedResponse = deserialize(response, TopicDescriptionFeed.class);
final TopicDescriptionFeed feed = feedResponse.getValue();
if (feed == null) {
logger.warning("Could not deserialize TopicDescriptionFeed. skip {}, top: {}", skip,
NUMBER_OF_ELEMENTS);
return Mono.empty();
}
final List<TopicDescription> entities = feed.getEntry().stream()
.filter(e -> e.getContent() != null && e.getContent().getTopicDescription() != null)
.map(e -> {
final String topicName = getTitleValue(e.getTitle());
final TopicDescription topicDescription = e.getContent().getTopicDescription();
EntityHelper.setTopicName(topicDescription, topicName);
return topicDescription;
})
.collect(Collectors.toList());
try {
return Mono.just(extractPage(feedResponse, entities, feed.getLink()));
} catch (MalformedURLException | UnsupportedEncodingException error) {
return Mono.error(new RuntimeException("Could not parse response into FeedPage<TopicDescription>",
error));
}
});
}
/**
* Given an XML title element, returns the XML text inside. Jackson deserializes Objects as LinkedHashMaps. XML text
* is represented as an entry with an empty string as the key.
*
* For example, the text returned from this {@code <title text="text/xml">QueueName</title>} is "QueueName".
*
* @param responseTitle XML title element.
*
* @return The XML text inside the title. {@code null} is returned if there is no value.
*/
@SuppressWarnings("unchecked")
private String getTitleValue(Object responseTitle) {
if (!(responseTitle instanceof Map)) {
return null;
}
final Map<String, String> map;
try {
map = (Map<String, String>) responseTitle;
return map.get("");
} catch (ClassCastException error) {
logger.warning("Unable to cast to Map<String,String>. Title: {}", responseTitle, error);
return null;
}
}
/**
 * Maps an exception from the ATOM APIs to its associated {@link HttpResponseException}.
 *
 * @param exception Exception from the ATOM API.
 *
 * @return The corresponding {@link HttpResponseException} or {@code throwable} if it is not an instance of {@link
 *     ServiceBusManagementErrorException}.
 */
private static Throwable mapException(Throwable exception) {
    if (!(exception instanceof ServiceBusManagementErrorException)) {
        return exception;
    }

    final ServiceBusManagementErrorException managementException =
        (ServiceBusManagementErrorException) exception;
    final ServiceBusManagementError error = managementException.getValue();
    final int code = error.getCode();

    // Translate the well-known ATOM error codes into their azure-core exception equivalents; anything
    // unrecognized falls through to the generic HttpResponseException.
    if (code == 401) {
        return new ClientAuthenticationException(error.getDetail(), managementException.getResponse(), exception);
    } else if (code == 404) {
        return new ResourceNotFoundException(error.getDetail(), managementException.getResponse(), exception);
    } else if (code == 409) {
        return new ResourceExistsException(error.getDetail(), managementException.getResponse(), exception);
    } else if (code == 412) {
        return new ResourceModifiedException(error.getDetail(), managementException.getResponse(), exception);
    } else {
        return new HttpResponseException(error.getDetail(), managementException.getResponse(), exception);
    }
}
/**
 * A page of Service Bus entities.
 *
 * @param <T> The entity description from Service Bus.
 */
private static final class FeedPage<T> implements PagedResponse<T> {
    private final int statusCode;
    private final HttpHeaders header;
    private final HttpRequest request;
    private final IterableStream<T> entries;
    private final String continuationToken;

    /**
     * Creates a page that does not have any more pages.
     *
     * @param entries Items in the page.
     */
    private FeedPage(int statusCode, HttpHeaders header, HttpRequest request, List<T> entries) {
        this(statusCode, header, request, entries, (String) null);
    }

    /**
     * Creates an instance that has additional pages to fetch.
     *
     * @param entries Items in the page.
     * @param skip Number of elements to "skip".
     */
    private FeedPage(int statusCode, HttpHeaders header, HttpRequest request, List<T> entries, int skip) {
        this(statusCode, header, request, entries, String.valueOf(skip));
    }

    /**
     * Canonical constructor shared by the overloads above so the field assignments are not duplicated.
     *
     * @param continuationToken Token used to fetch the next page, or {@code null} when this is the last page.
     */
    private FeedPage(int statusCode, HttpHeaders header, HttpRequest request, List<T> entries,
        String continuationToken) {
        this.statusCode = statusCode;
        this.header = header;
        this.request = request;
        this.entries = new IterableStream<>(entries);
        this.continuationToken = continuationToken;
    }

    @Override
    public IterableStream<T> getElements() {
        return entries;
    }

    @Override
    public String getContinuationToken() {
        return continuationToken;
    }

    @Override
    public int getStatusCode() {
        return statusCode;
    }

    @Override
    public HttpHeaders getHeaders() {
        return header;
    }

    @Override
    public HttpRequest getRequest() {
        return request;
    }

    @Override
    public void close() {
        // No-op: this page holds no resources that need releasing.
    }
}
/**
 * An {@link HttpResponse} that exposes only the status code and headers of the original {@link Response};
 * every body accessor is empty.
 *
 * @param <T> Type of the response value this HTTP response was created from.
 */
private static final class EntityNotFoundHttpResponse<T> extends HttpResponse {
    private final HttpHeaders headers;
    private final int statusCode;

    private EntityNotFoundHttpResponse(Response<T> response) {
        super(response.getRequest());

        this.statusCode = response.getStatusCode();
        this.headers = response.getHeaders();
    }

    @Override
    public int getStatusCode() {
        return statusCode;
    }

    @Override
    public HttpHeaders getHeaders() {
        return headers;
    }

    @Override
    public String getHeaderValue(String name) {
        return headers.getValue(name);
    }

    @Override
    public Flux<ByteBuffer> getBody() {
        // There is no payload to replay for this response.
        return Flux.empty();
    }

    @Override
    public Mono<byte[]> getBodyAsByteArray() {
        return Mono.empty();
    }

    @Override
    public Mono<String> getBodyAsString() {
        return Mono.empty();
    }

    @Override
    public Mono<String> getBodyAsString(Charset charset) {
        return Mono.empty();
    }
}
} |
nit: might need to consider local var `includeTextContent` to 'includeFieldElements' for the renaming as well. | public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeTextContent = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(targetStream,
form.length(), modelId).setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeTextContent).setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
} | .setIncludeFieldElements(includeTextContent).setPollInterval(Duration.ofSeconds(5))).getFinalResult() | public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(targetStream,
form.length(), modelId).setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements).setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
});
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
FormContentType.IMAGE_JPEG).getFinalResult().forEach(recognizedForm ->
recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(targetStream, form.length(), FormContentType.APPLICATION_PDF)
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(
new RecognizeOptions(targetStream, form.length())
.setFormContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{file_source_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(), FormContentType.IMAGE_JPEG)
.getFinalResult().forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeTextContent = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(new RecognizeOptions(targetStream, receipt.length())
.setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
});
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
FormContentType.IMAGE_JPEG).getFinalResult().forEach(recognizedForm ->
recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(targetStream, form.length(), FormContentType.APPLICATION_PDF)
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(
new RecognizeOptions(targetStream, form.length())
.setFormContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{file_source_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(), FormContentType.IMAGE_JPEG)
.getFinalResult().forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(new RecognizeOptions(targetStream, receipt.length())
.setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
} |
updated!! | public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeTextContent = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(targetStream,
form.length(), modelId).setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeTextContent).setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
} | .setIncludeFieldElements(includeTextContent).setPollInterval(Duration.ofSeconds(5))).getFinalResult() | public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(targetStream,
form.length(), modelId).setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements).setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
});
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
FormContentType.IMAGE_JPEG).getFinalResult().forEach(recognizedForm ->
recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
// Load the local PDF fully into memory and wrap it in a stream for the service call.
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Block on the long-running layout operation, then print each page's text angle,
// dimension unit, and the text of every recognized table cell.
formRecognizerClient.beginRecognizeContent(targetStream, form.length(), FormContentType.APPLICATION_PDF)
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
// NOTE(review): the placeholder names a URL but this snippet reads a local file — confirm intent.
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Same as beginRecognizeContent, but configured via RecognizeOptions: explicit
// content type plus a 5-second polling interval for the long-running operation.
formRecognizerClient.beginRecognizeContent(
new RecognizeOptions(targetStream, form.length())
.setFormContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
    // Recognizes a sales receipt located at a URL and prints a few well-known fields.
    // Each field's FieldValueType is checked before using the typed accessor, because
    // receipt fields are optional and loosely typed.
    String receiptUrl = "{file_source_url}";
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
            FormField merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
                System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                    merchantNameField.getFieldValue().asString(),
                    merchantNameField.getConfidence());
            }
            FormField transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
                System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                    transactionDateField.getFieldValue().asDate(),
                    transactionDateField.getConfidence());
            }
            // "Items" is a list of maps; drill into each item looking for its Quantity.
            FormField receiptItemsField = recognizedFields.get("Items");
            System.out.printf("Receipt Items: %n");
            if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
                List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
                receiptItems.forEach(receiptItem -> {
                    if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
                        receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
                            if (key.equals("Quantity")) {
                                if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
                                    // Fix: use %d for the Integer quantity (was %s), matching the
                                    // other receipt snippets in this file.
                                    System.out.printf("Quantity: %d, confidence: %.2f%n",
                                        formField.getFieldValue().asInteger(), formField.getConfidence());
                                }
                            }
                        });
                    }
                });
            }
        });
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
// NOTE(review): the placeholder names a URL but this snippet reads a local file — confirm intent.
File receipt = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Block until recognition completes, then print a few well-known receipt fields.
// Every field's FieldValueType is checked before the typed accessor is used.
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(), FormContentType.IMAGE_JPEG)
.getFinalResult().forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
// "Items" is a list of maps; drill into each item looking for its Quantity.
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
    // Recognizes a local receipt image with explicit options (content type, field
    // elements, 5-second poll interval) and prints well-known, type-checked fields.
    File receipt = new File("{local/file_path/fileName.jpg}");
    // Renamed from includeTextContent: the flag feeds setIncludeFieldElements, and the
    // duplicate snippet in this file already uses this name. Behavior is unchanged.
    boolean includeFieldElements = true;
    byte[] fileContent = Files.readAllBytes(receipt.toPath());
    InputStream targetStream = new ByteArrayInputStream(fileContent);
    formRecognizerClient.beginRecognizeReceipts(new RecognizeOptions(targetStream, receipt.length())
        .setFormContentType(FormContentType.IMAGE_JPEG)
        .setIncludeFieldElements(includeFieldElements)
        .setPollInterval(Duration.ofSeconds(5))).getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
            FormField merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
                System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                    merchantNameField.getFieldValue().asString(),
                    merchantNameField.getConfidence());
            }
            FormField transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
                System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                    transactionDateField.getFieldValue().asDate(),
                    transactionDateField.getConfidence());
            }
            // "Items" is a list of maps; drill into each item looking for its Quantity.
            FormField receiptItemsField = recognizedFields.get("Items");
            System.out.printf("Receipt Items: %n");
            if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
                List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
                receiptItems.forEach(receiptItem -> {
                    if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
                        receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
                            if (key.equals("Quantity")) {
                                if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
                                    System.out.printf("Quantity: %d, confidence: %.2f%n",
                                        formField.getFieldValue().asInteger(), formField.getConfidence());
                                }
                            }
                        });
                    }
                });
            }
        });
}
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
// Snippet only: the local deliberately shadows the field to show builder usage.
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
// Build a custom HTTP pipeline (add retry/logging/etc. policies as needed) ...
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
// ... and hand it to the client builder instead of the default pipeline.
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
    // Recognize a form at a URL with a custom-trained model and print each labeled
    // field's name, value and confidence.
    String sourceUrl = "{form_url}";
    String trainedModelId = "{custom_trained_model_id}";
    formRecognizerClient.beginRecognizeCustomFormsFromUrl(sourceUrl, trainedModelId)
        .getFinalResult()
        .forEach(form -> form.getFields().forEach((label, field) -> {
            System.out.printf("Field text: %s%n", label);
            System.out.printf("Field value: %s%n", field.getFieldValue());
            System.out.printf("Confidence score: %.2f%n", field.getConfidence());
        }));
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
// Read the local form image into memory and wrap it in a stream for upload.
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Block until recognition completes, then print each labeled field of every form.
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
FormContentType.IMAGE_JPEG).getFinalResult().forEach(recognizedForm ->
recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
// Recognize layout content (text angle, unit, tables) of a form reachable by URL.
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
// Dump the raw text of every table cell on the page.
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
// Load the local PDF fully into memory and wrap it in a stream for the service call.
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Block on the long-running layout operation, then print per-page angle, unit and table text.
formRecognizerClient.beginRecognizeContent(targetStream, form.length(), FormContentType.APPLICATION_PDF)
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
// NOTE(review): the placeholder names a URL but this snippet reads a local file — confirm intent.
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Configured via RecognizeOptions: explicit content type plus a 5-second poll interval.
formRecognizerClient.beginRecognizeContent(
new RecognizeOptions(targetStream, form.length())
.setFormContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
    // Recognizes a sales receipt located at a URL and prints a few well-known fields.
    // Each field's FieldValueType is checked before using the typed accessor, because
    // receipt fields are optional and loosely typed.
    String receiptUrl = "{file_source_url}";
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
            FormField merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
                System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                    merchantNameField.getFieldValue().asString(),
                    merchantNameField.getConfidence());
            }
            FormField transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
                System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                    transactionDateField.getFieldValue().asDate(),
                    transactionDateField.getConfidence());
            }
            // "Items" is a list of maps; drill into each item looking for its Quantity.
            FormField receiptItemsField = recognizedFields.get("Items");
            System.out.printf("Receipt Items: %n");
            if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
                List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
                receiptItems.forEach(receiptItem -> {
                    if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
                        receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
                            if (key.equals("Quantity")) {
                                if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
                                    // Fix: use %d for the Integer quantity (was %s), matching the
                                    // other receipt snippets in this file.
                                    System.out.printf("Quantity: %d, confidence: %.2f%n",
                                        formField.getFieldValue().asInteger(), formField.getConfidence());
                                }
                            }
                        });
                    }
                });
            }
        });
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
// NOTE(review): the placeholder names a URL but this snippet reads a local file — confirm intent.
File receipt = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Block until recognition completes, then print a few well-known receipt fields,
// type-checking each via FieldValueType before using the typed accessor.
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(), FormContentType.IMAGE_JPEG)
.getFinalResult().forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
// "Items" is a list of maps; drill into each item looking for its Quantity.
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
// Recognizes a local receipt image with explicit options (content type, field
// elements, 5-second poll interval) and prints well-known, type-checked fields.
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(new RecognizeOptions(targetStream, receipt.length())
.setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
// "Items" is a list of maps; drill into each item looking for its Quantity.
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
} |
this should be `fieldElements` | public static void main(String[] args) {
// Build a synchronous client for the demo.
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
// NOTE(review): endpoint literal appears truncated in this snippet — confirm full "https://..." URL.
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String filePath = "{analyze_file_path}";
// Start custom-form recognition with field elements (words/lines) included.
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(filePath, modelId)
.setIncludeFieldElements(true));
// Block until the long-running operation completes.
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
System.out.println("--------RECOGNIZING FORM --------");
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %s has type: %s%n", i, recognizedForm.getFormType());
// Print every labeled field with the text it was extracted from and its confidence.
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getFieldValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %.2f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
// For each cell, print the bounding box of every recognized word element.
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> {
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY()));
});
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr, formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
} | public static void main(String[] args) {
// Build a synchronous client for the demo.
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
// NOTE(review): endpoint literal appears truncated in this snippet — confirm full "https://..." URL.
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String filePath = "{analyze_file_path}";
// Start custom-form recognition with field elements (words/lines) included.
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(filePath, modelId)
.setIncludeFieldElements(true));
// Block until the long-running operation completes.
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
System.out.println("--------RECOGNIZING FORM --------");
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %s has type: %s%n", i, recognizedForm.getFormType());
// Print every labeled field with the text it was extracted from and its confidence.
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getFieldValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %.2f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
// For each cell, print the bounding box of every recognized word element.
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> {
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY()));
});
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr, formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | |
same here | public static void main(String[] args) {
// Build an asynchronous client for the demo.
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
// NOTE(review): endpoint literal appears truncated in this snippet — confirm full "https://..." URL.
.endpoint("https:
.buildAsyncClient();
String modelId = "{model_Id}";
String filePath = "{file_source_url}";
// Start custom-form recognition with field elements (words/lines) included.
PollerFlux<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomFormsFromUrl(new RecognizeCustomFormsOptions(filePath, modelId)
.setIncludeFieldElements(true));
// Take the final poll response; fail the Mono if polling did not complete successfully.
Mono<List<RecognizedForm>> recognizeFormResult = recognizeFormPoller
.last()
.flatMap(trainingOperationResponse -> {
if (trainingOperationResponse.getStatus().isComplete()) {
return trainingOperationResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ trainingOperationResponse.getStatus()));
}
});
System.out.println("--------RECOGNIZING FORM --------");
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %s has type: %s%n", i, recognizedForm.getFormType());
// Print every labeled field with the text it was extracted from and its confidence.
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field %s has value %s based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getFieldValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence());
});
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
// For each cell, print the bounding box of every recognized word element.
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
final StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> {
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY()));
});
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr,
formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
});
// Demo only: keep the JVM alive long enough for the async pipeline to finish.
try {
TimeUnit.SECONDS.sleep(30);
// NOTE(review): swallowing InterruptedException loses the interrupt flag; consider
// Thread.currentThread().interrupt() — acceptable for a sample's main().
} catch (InterruptedException e) {
e.printStackTrace();
}
} | public static void main(String[] args) {
// Build an asynchronous client for the demo.
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
// NOTE(review): endpoint literal appears truncated in this snippet — confirm full "https://..." URL.
.endpoint("https:
.buildAsyncClient();
String modelId = "{model_Id}";
String filePath = "{file_source_url}";
// Start custom-form recognition with field elements (words/lines) included.
PollerFlux<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomFormsFromUrl(new RecognizeCustomFormsOptions(filePath, modelId)
.setIncludeFieldElements(true));
// Take the final poll response; fail the Mono if polling did not complete successfully.
Mono<List<RecognizedForm>> recognizeFormResult = recognizeFormPoller
.last()
.flatMap(trainingOperationResponse -> {
if (trainingOperationResponse.getStatus().isComplete()) {
return trainingOperationResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ trainingOperationResponse.getStatus()));
}
});
System.out.println("--------RECOGNIZING FORM --------");
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %s has type: %s%n", i, recognizedForm.getFormType());
// Print every labeled field with the text it was extracted from and its confidence.
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field %s has value %s based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getFieldValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence());
});
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
// For each cell, print the bounding box of every recognized word element.
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
final StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> {
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY()));
});
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr,
formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
});
// Demo only: keep the JVM alive long enough for the async pipeline to finish.
try {
TimeUnit.SECONDS.sleep(30);
// NOTE(review): swallowing InterruptedException loses the interrupt flag; consider
// Thread.currentThread().interrupt() — acceptable for a sample's main().
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class GetBoundingBoxesAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | class GetBoundingBoxesAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | |
should we change this to fieldData? | public void beginRecognizeCustomFormsWithOptions() throws IOException {
// Read the local form image into memory and wrap it in a stream for upload.
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Recognize with explicit options (content type, field elements, 5-second poll
// interval), then print each labeled field's name, value and confidence.
formRecognizerClient.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(targetStream,
form.length(), modelId).setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements).setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
} | String fieldText = entry.getKey(); | public void beginRecognizeCustomFormsWithOptions() throws IOException {
// Read the local form image into memory and wrap it in a stream for upload.
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
// Recognize with explicit options (content type, field elements, 5-second poll
// interval), then print each labeled field's name, value and confidence.
formRecognizerClient.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(targetStream,
form.length(), modelId).setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements).setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
// Snippet only: the local deliberately shadows the field to show builder usage.
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
// Build a custom HTTP pipeline (add retry/logging/etc. policies as needed) ...
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
// ... and hand it to the client builder instead of the default pipeline.
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
// Recognize a form at a URL with a custom-trained model; print each labeled field.
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
});
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
FormContentType.IMAGE_JPEG).getFinalResult().forEach(recognizedForm ->
recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(targetStream, form.length(), FormContentType.APPLICATION_PDF)
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(
new RecognizeOptions(targetStream, form.length())
.setFormContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{file_source_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(), FormContentType.IMAGE_JPEG)
.getFinalResult().forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(new RecognizeOptions(targetStream, receipt.length())
.setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
});
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
FormContentType.IMAGE_JPEG).getFinalResult().forEach(recognizedForm ->
recognizedForm.getFields().entrySet().forEach(entry -> {
String fieldText = entry.getKey();
FormField fieldValue = entry.getValue();
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getFieldValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(targetStream, form.length(), FormContentType.APPLICATION_PDF)
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(
new RecognizeOptions(targetStream, form.length())
.setFormContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{file_source_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %s, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(), FormContentType.IMAGE_JPEG)
.getFinalResult().forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(new RecognizeOptions(targetStream, receipt.length())
.setFormContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5))).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField> recognizedFields = recognizedReceipt.getRecognizedForm().getFields();
FormField merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField.getFieldValue().getType() == FieldValueType.STRING) {
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantNameField.getFieldValue().asString(),
merchantNameField.getConfidence());
}
FormField transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField.getFieldValue().getType() == FieldValueType.DATE) {
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDateField.getFieldValue().asDate(),
transactionDateField.getConfidence());
}
FormField receiptItemsField = recognizedFields.get("Items");
System.out.printf("Receipt Items: %n");
if (receiptItemsField.getFieldValue().getType() == FieldValueType.LIST) {
List<FormField> receiptItems = receiptItemsField.getFieldValue().asList();
receiptItems.forEach(receiptItem -> {
if (receiptItem.getFieldValue().getType() == FieldValueType.MAP) {
receiptItem.getFieldValue().asMap().forEach((key, formField) -> {
if (key.equals("Quantity")) {
if (formField.getFieldValue().getType() == FieldValueType.INTEGER) {
System.out.printf("Quantity: %d, confidence: %.2f%n",
formField.getFieldValue().asInteger(), formField.getConfidence());
}
}
});
}
});
}
});
}
} |
same here? | public static void main(String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String filePath = "{analyze_file_path}";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(filePath, modelId)
.setIncludeFieldElements(true));
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
System.out.println("--------RECOGNIZING FORM --------");
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %s has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getFieldValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %.2f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> {
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY()));
});
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr, formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
} | recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s " | public static void main(String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String filePath = "{analyze_file_path}";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(filePath, modelId)
.setIncludeFieldElements(true));
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
System.out.println("--------RECOGNIZING FORM --------");
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %s has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getFieldValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %.2f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> {
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY()));
});
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr, formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} |
formelement? | public static void main(String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String filePath = "{analyze_file_path}";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(filePath, modelId)
.setIncludeFieldElements(true));
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
System.out.println("--------RECOGNIZING FORM --------");
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %s has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getFieldValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %.2f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> {
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY()));
});
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr, formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
} | formTableCell.getFieldElements().forEach(formContent -> { | public static void main(String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String filePath = "{analyze_file_path}";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(filePath, modelId)
.setIncludeFieldElements(true));
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
System.out.println("--------RECOGNIZING FORM --------");
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %s has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getFieldValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %.2f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> {
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY()));
});
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr, formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} |
this needs to change too | public static void main(String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
List<RecognizedForm> formsWithLabeledModel =
client.beginRecognizeCustomForms(
new RecognizeCustomFormsOptions(new FileInputStream(analyzeFile), analyzeFile.length(),
"{labeled_model_Id}").setFormContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true).setPollInterval(Duration.ofSeconds(5))).getFinalResult();
List<RecognizedForm> formsWithUnlabeledModel =
client.beginRecognizeCustomForms(new FileInputStream(analyzeFile), analyzeFile.length(),
"{unlabeled_model_Id}",
FormContentType.APPLICATION_PDF).getFinalResult();
System.out.println("--------Recognizing forms with labeled custom model--------");
formsWithLabeledModel.forEach(labeledForm -> labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getFieldValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "MerchantName".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
}));
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
formsWithUnlabeledModel.forEach(unLabeledForm -> unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
final StringBuilder boundingBoxLabelStr = new StringBuilder();
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), boundingBoxLabelStr, formField.getConfidence());
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getFieldValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry -> System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
}));
} | public static void main(String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
List<RecognizedForm> formsWithLabeledModel =
client.beginRecognizeCustomForms(
new RecognizeCustomFormsOptions(new FileInputStream(analyzeFile), analyzeFile.length(),
"{labeled_model_Id}").setFormContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true).setPollInterval(Duration.ofSeconds(5))).getFinalResult();
List<RecognizedForm> formsWithUnlabeledModel =
client.beginRecognizeCustomForms(new FileInputStream(analyzeFile), analyzeFile.length(),
"{unlabeled_model_Id}",
FormContentType.APPLICATION_PDF).getFinalResult();
System.out.println("--------Recognizing forms with labeled custom model--------");
formsWithLabeledModel.forEach(labeledForm -> labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getFieldValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "MerchantName".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
}));
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
formsWithUnlabeledModel.forEach(unLabeledForm -> unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
final StringBuilder boundingBoxLabelStr = new StringBuilder();
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), boundingBoxLabelStr, formField.getConfidence());
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getFieldValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry -> System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
}));
} | class AdvancedDiffLabeledUnlabeledData {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class AdvancedDiffLabeledUnlabeledData {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | |
@weidongxu-microsoft Silly question but why are we just disabling the assert and still bothering to get the callout count? With this disabled, are we missing a piece of verification that we should have for this scenario? | public void lroTimeout() {
final Duration timeoutDuration = Duration.ofMillis(1000);
final String resourceEndpoint = "/resource/1";
final AtomicInteger getCallCount = new AtomicInteger(0);
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT, RequestMethod.GET)) {
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount.getAndIncrement();
}
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
Mono<FooWithProvisioningState> resultMonoWithTimeout = lroFlux.last()
.flatMap(AsyncPollResponse::getFinalResult)
.timeout(timeoutDuration);
StepVerifier.create(resultMonoWithTimeout)
.thenAwait()
.verifyError(TimeoutException.class);
int count = getCallCount.get();
try {
Thread.sleep(timeoutDuration.toMillis());
} catch (InterruptedException e) {
}
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
} | public void lroTimeout() {
final Duration timeoutDuration = Duration.ofMillis(1000);
final String resourceEndpoint = "/resource/1";
final AtomicInteger getCallCount = new AtomicInteger(0);
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT, RequestMethod.GET)) {
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount.getAndIncrement();
}
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
Mono<FooWithProvisioningState> resultMonoWithTimeout = lroFlux.last()
.flatMap(AsyncPollResponse::getFinalResult)
.timeout(timeoutDuration);
StepVerifier.create(resultMonoWithTimeout)
.thenAwait()
.verifyError(TimeoutException.class);
int count = getCallCount.get();
try {
Thread.sleep(timeoutDuration.toMillis());
} catch (InterruptedException e) {
}
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
} | class LROPollerTests {
private static final SerializerAdapter SERIALIZER = new AzureJacksonAdapter();
private static final Duration POLLING_DURATION = Duration.ofMillis(100);
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Host("http:
@ServiceInterface(name = "ProvisioningStateLroService")
interface ProvisioningStateLroServiceClient {
@Put("/resource/1")
Mono<Response<Flux<ByteBuffer>>> startLro(Context context);
}
@Test
public void lroBasedOnProvisioningState() {
WireMockServer lroServer = startServer();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
Assertions.assertNull(pollResult.getValue().getResourceId());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
Assertions.assertNotNull(pollResult.getValue().getResourceId());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperation() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Succeeded\"}")
.build();
}
} else if (request.getUrl().endsWith(resourceEndpoint) && getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("Succeeded", UUID.randomUUID().toString())))
.build();
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
FooWithProvisioningState foo = pollResponse.getFinalResult().block();
Assertions.assertNotNull(foo.getResourceId());
Assertions.assertEquals("Succeeded", foo.getProvisioningState());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperationFailed() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Failed\"}")
.build();
}
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertNotNull(pollResult.getValue());
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.FAILED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
Assertions.assertEquals(LongRunningOperationStatus.FAILED, pollResponse.getStatus());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroSucceededNoPoll() {
final String resourceEndpoint = "/resource/1";
final String sampleVaultUpdateSucceededResponse = "{\"id\":\"/subscriptions/
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(200)
.body(sampleVaultUpdateSucceededResponse)
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<Resource>, Resource> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
Resource.class,
Resource.class,
POLLING_DURATION,
newLroInitFunction(client));
StepVerifier.create(lroFlux)
.expectSubscription()
.expectNextMatches(response -> {
PollResult<Resource> pollResult = response.getValue();
return response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED
&& pollResult != null
&& pollResult.getValue() != null
&& pollResult.getValue().id() != null;
}).verifyComplete();
AsyncPollResponse<PollResult<Resource>, Resource> asyncPollResponse = lroFlux.blockLast();
Assertions.assertNotNull(asyncPollResponse);
Resource result = asyncPollResponse.getFinalResult().block();
Assertions.assertNotNull(result);
Assertions.assertNotNull(result.id());
Assertions.assertEquals("v1weidxu", result.name());
Assertions.assertEquals("Microsoft.KeyVault/vaults", result.type());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
@Test
public void lroRetryAfter() {
ServerConfigure configure = new ServerConfigure();
Duration expectedPollingDuration = Duration.ofSeconds(3);
configure.pollingCountTillSuccess = 3;
configure.additionalHeaders = new HttpHeaders(new HttpHeader("Retry-After", "1"));
WireMockServer lroServer = startServer(configure);
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
long nanoTime = System.nanoTime();
FooWithProvisioningState result = lroFlux
.doOnNext(response -> {
System.out.println(String.format("[%s] status %s",
OffsetDateTime.now().toString(), response.getStatus().toString()));
}).blockLast()
.getFinalResult().block();
Assertions.assertNotNull(result);
Duration pollingDuration = Duration.ofNanos(System.nanoTime() - nanoTime);
Assertions.assertTrue(pollingDuration.compareTo(expectedPollingDuration) > 0);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroContext() {
WireMockServer lroServer = startServer();
HttpPipelinePolicy contextVerifyPolicy = (context, next) -> {
Optional<Object> valueOpt = context.getData("key1");
if (valueOpt.isPresent() && "value1".equals(valueOpt.get())) {
return next.process();
} else {
return Mono.error(new AssertionError());
}
};
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port(), Collections.singletonList(contextVerifyPolicy)),
SERIALIZER);
Flux<AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState>> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
lroFlux = lroFlux.subscriberContext(context -> context.put("key1", "value1"));
FooWithProvisioningState result = lroFlux
.blockLast()
.getFinalResult()
.block();
Assertions.assertNotNull(result);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
private static class ServerConfigure {
private int pollingCountTillSuccess = 2;
private HttpHeaders additionalHeaders = HttpHeaders.noHeaders();
}
private static WireMockServer startServer() {
return startServer(new ServerConfigure());
}
private static WireMockServer startServer(ServerConfigure serverConfigure) {
final String resourceEndpoint = "/resource/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
System.out.println(String.format("[%s] PUT status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "SUCCEEDED"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("SUCCEEDED", UUID.randomUUID().toString())))
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
return lroServer;
}
private static WireMockServer createServer(ResponseTransformer transformer,
String... endpoints) {
WireMockServer server = new WireMockServer(WireMockConfiguration
.options()
.dynamicPort()
.extensions(transformer)
.disableRequestJournal());
for (String endpoint : endpoints) {
server.stubFor(WireMock.any(WireMock.urlEqualTo(endpoint))
.willReturn(WireMock.aResponse()));
}
return server;
}
private static HttpPipeline createHttpPipeline(int port) {
return createHttpPipeline(port, Collections.emptyList());
}
private static HttpPipeline createHttpPipeline(int port, List<HttpPipelinePolicy> additionalPolicies) {
List<HttpPipelinePolicy> policies = new ArrayList<>(additionalPolicies);
policies.add(new HttpPipelinePolicy() {
@Override
public Mono<HttpResponse> process(HttpPipelineCallContext context,
HttpPipelineNextPolicy next) {
HttpRequest request = context.getHttpRequest();
request.setUrl(updatePort(request.getUrl(), port));
context.setHttpRequest(request);
return next.process();
}
private URL updatePort(URL url, int port) {
try {
return new URL(url.getProtocol(), url.getHost(), port, url.getFile());
} catch (MalformedURLException mue) {
throw new RuntimeException(mue);
}
}
});
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
}
private Mono<Response<Flux<ByteBuffer>>> newLroInitFunction(ProvisioningStateLroServiceClient client) {
return FluxUtil.fluxContext(context -> client.startLro(context).flux()).next();
}
private static String toJson(Object object) {
try {
return SERIALIZER.serialize(object, SerializerEncoding.JSON);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
} | class LROPollerTests {
private static final SerializerAdapter SERIALIZER = new AzureJacksonAdapter();
private static final Duration POLLING_DURATION = Duration.ofMillis(100);
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Host("http:
@ServiceInterface(name = "ProvisioningStateLroService")
interface ProvisioningStateLroServiceClient {
@Put("/resource/1")
Mono<Response<Flux<ByteBuffer>>> startLro(Context context);
}
@Test
public void lroBasedOnProvisioningState() {
WireMockServer lroServer = startServer();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
Assertions.assertNull(pollResult.getValue().getResourceId());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
Assertions.assertNotNull(pollResult.getValue().getResourceId());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperation() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Succeeded\"}")
.build();
}
} else if (request.getUrl().endsWith(resourceEndpoint) && getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("Succeeded", UUID.randomUUID().toString())))
.build();
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
FooWithProvisioningState foo = pollResponse.getFinalResult().block();
Assertions.assertNotNull(foo.getResourceId());
Assertions.assertEquals("Succeeded", foo.getProvisioningState());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Verifies the Azure-AsyncOperation polling flow when the operation terminates in
// "Failed": the initial PUT returns 201 with an Azure-AsyncOperation header, polls
// report InProgress until the configured count, then Failed; the poller must emit
// IN_PROGRESS followed by a terminal FAILED status.
@Test
public void lroBasedOnAsyncOperationFailed() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
// Stateful mock service: counts GET polls on the operation endpoint.
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
// Reject anything other than the two known endpoints.
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
// Initial PUT: 201 Created plus the Azure-AsyncOperation header pointing at
// the operation-status endpoint.
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
// InProgress until the configured poll count, then terminal Failed.
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Failed\"}")
.build();
}
} else {
// A GET on the resource itself is unexpected in the failed flow.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
// Expect exactly two emissions: IN_PROGRESS followed by FAILED.
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertNotNull(pollResult.getValue());
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.FAILED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
Assertions.assertEquals(LongRunningOperationStatus.FAILED, pollResponse.getStatus());
} finally {
// Always tear the mock server down, even when assertions fail.
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroSucceededNoPoll() {
final String resourceEndpoint = "/resource/1";
final String sampleVaultUpdateSucceededResponse = "{\"id\":\"/subscriptions/
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(200)
.body(sampleVaultUpdateSucceededResponse)
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<Resource>, Resource> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
Resource.class,
Resource.class,
POLLING_DURATION,
newLroInitFunction(client));
StepVerifier.create(lroFlux)
.expectSubscription()
.expectNextMatches(response -> {
PollResult<Resource> pollResult = response.getValue();
return response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED
&& pollResult != null
&& pollResult.getValue() != null
&& pollResult.getValue().id() != null;
}).verifyComplete();
AsyncPollResponse<PollResult<Resource>, Resource> asyncPollResponse = lroFlux.blockLast();
Assertions.assertNotNull(asyncPollResponse);
Resource result = asyncPollResponse.getFinalResult().block();
Assertions.assertNotNull(result);
Assertions.assertNotNull(result.id());
Assertions.assertEquals("v1weidxu", result.name());
Assertions.assertEquals("Microsoft.KeyVault/vaults", result.type());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Verifies that the poller honors the server-supplied "Retry-After: 1" header:
// with three polls required, total wall-clock polling time must exceed the
// expected lower bound instead of the default 100ms POLLING_DURATION cadence.
// Fix: the @Test annotation was duplicated (not @Repeatable => compile error).
@Test
public void lroRetryAfter() {
    ServerConfigure configure = new ServerConfigure();
    Duration expectedPollingDuration = Duration.ofSeconds(3);
    configure.pollingCountTillSuccess = 3;
    configure.additionalHeaders = new HttpHeaders(new HttpHeader("Retry-After", "1"));
    WireMockServer lroServer = startServer(configure);
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        long nanoTime = System.nanoTime();
        FooWithProvisioningState result = lroFlux
            .doOnNext(response -> {
                System.out.println(String.format("[%s] status %s",
                    OffsetDateTime.now().toString(), response.getStatus().toString()));
            }).blockLast()
            .getFinalResult().block();
        Assertions.assertNotNull(result);
        // Elapsed time must reflect the server-driven Retry-After delays.
        Duration pollingDuration = Duration.ofNanos(System.nanoTime() - nanoTime);
        Assertions.assertTrue(pollingDuration.compareTo(expectedPollingDuration) > 0);
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
// Verifies that a Reactor subscriber-context entry ("key1" -> "value1") set on the
// poller flux is propagated into the HTTP pipeline call context of every request;
// the verifying policy fails the call with an AssertionError when it is missing.
@Test
public void lroContext() {
WireMockServer lroServer = startServer();
// Pipeline policy that rejects any request whose call context lacks key1=value1.
HttpPipelinePolicy contextVerifyPolicy = (context, next) -> {
Optional<Object> valueOpt = context.getData("key1");
if (valueOpt.isPresent() && "value1".equals(valueOpt.get())) {
return next.process();
} else {
return Mono.error(new AssertionError());
}
};
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port(), Collections.singletonList(contextVerifyPolicy)),
SERIALIZER);
Flux<AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState>> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
// Inject the context entry the verifying policy expects on every request.
lroFlux = lroFlux.subscriberContext(context -> context.put("key1", "value1"));
FooWithProvisioningState result = lroFlux
.blockLast()
.getFinalResult()
.block();
Assertions.assertNotNull(result);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Tunables for the mock LRO server used by startServer(...).
private static class ServerConfigure {
// Number of GET polls before the mock reports a terminal "SUCCEEDED" state.
private int pollingCountTillSuccess = 2;
// Extra headers (e.g. Retry-After) attached to in-progress responses.
private HttpHeaders additionalHeaders = HttpHeaders.noHeaders();
}
// Starts a mock LRO server with the default configuration
// (two polls until success, no extra headers).
private static WireMockServer startServer() {
    final ServerConfigure defaults = new ServerConfigure();
    return startServer(defaults);
}
// Starts a WireMock server that simulates a provisioning-state based LRO on
// /resource/1: PUT and early GETs report "IN_PROGRESS" (with any configured extra
// headers), and the poll whose count equals pollingCountTillSuccess reports
// "SUCCEEDED" with a generated resource id.
private static WireMockServer startServer(ServerConfigure serverConfigure) {
final String resourceEndpoint = "/resource/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
// Number of GET polls seen so far; single-element array to allow mutation
// from within the anonymous class.
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
// Reject anything other than the single known endpoint.
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
// Initial PUT: operation accepted but still in progress.
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
System.out.println(String.format("[%s] PUT status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "SUCCEEDED"));
// Terminal state: succeeded, with a generated resource id.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("SUCCEEDED", UUID.randomUUID().toString())))
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
return lroServer;
}
// Builds (but does not start) a WireMock server on a dynamic port, wiring in the
// given transformer as an extension and stubbing every supplied endpoint with a
// catch-all response so the transformer sees all traffic.
private static WireMockServer createServer(ResponseTransformer transformer,
                                           String... endpoints) {
    WireMockConfiguration configuration = WireMockConfiguration.options()
        .dynamicPort()
        .extensions(transformer)
        .disableRequestJournal();
    WireMockServer server = new WireMockServer(configuration);
    for (int i = 0; i < endpoints.length; i++) {
        String endpoint = endpoints[i];
        server.stubFor(
            WireMock.any(WireMock.urlEqualTo(endpoint)).willReturn(WireMock.aResponse()));
    }
    return server;
}
// Convenience overload: a pipeline containing only the port-rewriting policy.
private static HttpPipeline createHttpPipeline(int port) {
    List<HttpPipelinePolicy> noExtraPolicies = Collections.emptyList();
    return createHttpPipeline(port, noExtraPolicies);
}
// Builds an HttpPipeline that appends, after any caller-supplied policies, a policy
// rewriting every request URL's port to the WireMock server's dynamic port.
private static HttpPipeline createHttpPipeline(int port, List<HttpPipelinePolicy> additionalPolicies) {
    List<HttpPipelinePolicy> policies = new ArrayList<>(additionalPolicies);
    // Lambda form of the original anonymous HttpPipelinePolicy: redirect the
    // request to the mock server's port, preserving protocol/host/file.
    policies.add((context, next) -> {
        HttpRequest request = context.getHttpRequest();
        URL url = request.getUrl();
        try {
            request.setUrl(new URL(url.getProtocol(), url.getHost(), port, url.getFile()));
        } catch (MalformedURLException mue) {
            throw new RuntimeException(mue);
        }
        context.setHttpRequest(request);
        return next.process();
    });
    HttpPipelinePolicy[] policyArray = policies.toArray(new HttpPipelinePolicy[0]);
    return new HttpPipelineBuilder()
        .policies(policyArray)
        .build();
}
// Creates the poller's activation operation: invokes the initial PUT with the
// subscriber's Reactor context propagated into the service call.
private Mono<Response<Flux<ByteBuffer>>> newLroInitFunction(ProvisioningStateLroServiceClient client) {
    Flux<Response<Flux<ByteBuffer>>> activation =
        FluxUtil.fluxContext(context -> client.startLro(context).flux());
    return activation.next();
}
// Serializes an object to JSON for use as a mock HTTP response body; serialization
// failures are surfaced unchecked since this is test-only code.
private static String toJson(Object object) {
    final String json;
    try {
        json = SERIALIZER.serialize(object, SerializerEncoding.JSON);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
    return json;
}
} | |
You are correct. Ideally we should still do verification (maybe relax a bit on the condition for success). Due to the timing that the issue get called out (Alan notified that it blocking the release, and I've no idea if I relax the condition a bit will it still cause the issue), and the importance of the test (it is not very important, the test mostly only verify that Reactor behaves correctly, and due to async nature of the Reactor, the count might be off a bit), the nature of the test (it depends on real time, and we do not want to wait too long in unit test), hence for now we only disable the assert line. | public void lroTimeout() {
// Body of lroTimeout: the mock server never reaches a terminal state, so applying a
// 1-second timeout to the final-result Mono must surface a TimeoutException.
final Duration timeoutDuration = Duration.ofMillis(1000);
final String resourceEndpoint = "/resource/1";
final AtomicInteger getCallCount = new AtomicInteger(0);
// Mock service that stays "IN_PROGRESS" forever, counting GET polls it receives.
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT, RequestMethod.GET)) {
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount.getAndIncrement();
}
// Never terminal: forces the client-side timeout below.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
Mono<FooWithProvisioningState> resultMonoWithTimeout = lroFlux.last()
.flatMap(AsyncPollResponse::getFinalResult)
.timeout(timeoutDuration);
StepVerifier.create(resultMonoWithTimeout)
.thenAwait()
.verifyError(TimeoutException.class);
// NOTE(review): the poll-count assertion that consumed this snapshot was
// intentionally disabled because it was timing-sensitive and flaky; the value is
// kept so the check can be reinstated later.
int count = getCallCount.get();
try {
Thread.sleep(timeoutDuration.toMillis());
} catch (InterruptedException e) {
// Intentionally ignored in this test-only wait; consider re-interrupting via
// Thread.currentThread().interrupt() if this is ever promoted to shared code.
}
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
} | public void lroTimeout() {
// Body of lroTimeout: the mock server never reaches a terminal state, so applying a
// 1-second timeout to the final-result Mono must surface a TimeoutException.
final Duration timeoutDuration = Duration.ofMillis(1000);
final String resourceEndpoint = "/resource/1";
final AtomicInteger getCallCount = new AtomicInteger(0);
// Mock service that stays "IN_PROGRESS" forever, counting GET polls it receives.
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT, RequestMethod.GET)) {
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount.getAndIncrement();
}
// Never terminal: forces the client-side timeout below.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
Mono<FooWithProvisioningState> resultMonoWithTimeout = lroFlux.last()
.flatMap(AsyncPollResponse::getFinalResult)
.timeout(timeoutDuration);
StepVerifier.create(resultMonoWithTimeout)
.thenAwait()
.verifyError(TimeoutException.class);
// NOTE(review): the poll-count assertion that consumed this snapshot was
// intentionally disabled because it was timing-sensitive and flaky; the value is
// kept so the check can be reinstated later.
int count = getCallCount.get();
try {
Thread.sleep(timeoutDuration.toMillis());
} catch (InterruptedException e) {
// Intentionally ignored in this test-only wait; consider re-interrupting via
// Thread.currentThread().interrupt() if this is ever promoted to shared code.
}
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
} | class LROPollerTests {
// Serializer used both by the poller factory and by the mock service when
// building JSON payloads.
private static final SerializerAdapter SERIALIZER = new AzureJacksonAdapter();
// Default poll interval; deliberately short so tests finish quickly.
private static final Duration POLLING_DURATION = Duration.ofMillis(100);
// Initializes @Mock-annotated fields before each test.
// NOTE(review): MockitoAnnotations.initMocks is deprecated in newer Mockito in
// favor of openMocks; confirm the Mockito version before migrating.
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
// Clears inline mocks after each test to avoid cross-test leakage and memory growth.
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Host("http:
@ServiceInterface(name = "ProvisioningStateLroService")
interface ProvisioningStateLroServiceClient {
@Put("/resource/1")
Mono<Response<Flux<ByteBuffer>>> startLro(Context context);
}
// Verifies polling driven by the resource's provisioningState field: the poller
// must emit IN_PROGRESS (no resource id yet) followed by SUCCESSFULLY_COMPLETED
// (resource id populated), and nothing more.
@Test
public void lroBasedOnProvisioningState() {
WireMockServer lroServer = startServer();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
// Expect exactly two emissions; a third indicates a poller bug.
lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
Assertions.assertNull(pollResult.getValue().getResourceId());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
Assertions.assertNotNull(pollResult.getValue().getResourceId());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Verifies the happy-path Azure-AsyncOperation flow: PUT returns 201 plus an
// Azure-AsyncOperation header; the operation endpoint reports InProgress then
// Succeeded; the final GET on the resource returns the completed resource whose
// id and provisioningState are asserted at the end.
@Test
public void lroBasedOnAsyncOperation() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
// Stateful mock service: counts GET polls on the operation endpoint.
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
// Reject anything other than the two known endpoints.
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
// Initial PUT: 201 Created plus the Azure-AsyncOperation header pointing at
// the operation-status endpoint.
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
// InProgress until the configured poll count, then terminal Succeeded.
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Succeeded\"}")
.build();
}
} else if (request.getUrl().endsWith(resourceEndpoint) && getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
// Final-result fetch on the resource itself, only valid after success.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("Succeeded", UUID.randomUUID().toString())))
.build();
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
// Expect exactly two emissions: IN_PROGRESS then SUCCESSFULLY_COMPLETED.
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
FooWithProvisioningState foo = pollResponse.getFinalResult().block();
Assertions.assertNotNull(foo.getResourceId());
Assertions.assertEquals("Succeeded", foo.getProvisioningState());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Verifies the Azure-AsyncOperation polling flow when the operation terminates in
// "Failed": the initial PUT returns 201 with an Azure-AsyncOperation header, polls
// report InProgress until the configured count, then Failed; the poller must emit
// IN_PROGRESS followed by a terminal FAILED status.
@Test
public void lroBasedOnAsyncOperationFailed() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
// Stateful mock service: counts GET polls on the operation endpoint.
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
// Reject anything other than the two known endpoints.
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
// Initial PUT: 201 Created plus the Azure-AsyncOperation header pointing at
// the operation-status endpoint.
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
// InProgress until the configured poll count, then terminal Failed.
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Failed\"}")
.build();
}
} else {
// A GET on the resource itself is unexpected in the failed flow.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
// Expect exactly two emissions: IN_PROGRESS followed by FAILED.
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertNotNull(pollResult.getValue());
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.FAILED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
Assertions.assertEquals(LongRunningOperationStatus.FAILED, pollResponse.getStatus());
} finally {
// Always tear the mock server down, even when assertions fail.
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroSucceededNoPoll() {
final String resourceEndpoint = "/resource/1";
final String sampleVaultUpdateSucceededResponse = "{\"id\":\"/subscriptions/
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(200)
.body(sampleVaultUpdateSucceededResponse)
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<Resource>, Resource> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
Resource.class,
Resource.class,
POLLING_DURATION,
newLroInitFunction(client));
StepVerifier.create(lroFlux)
.expectSubscription()
.expectNextMatches(response -> {
PollResult<Resource> pollResult = response.getValue();
return response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED
&& pollResult != null
&& pollResult.getValue() != null
&& pollResult.getValue().id() != null;
}).verifyComplete();
AsyncPollResponse<PollResult<Resource>, Resource> asyncPollResponse = lroFlux.blockLast();
Assertions.assertNotNull(asyncPollResponse);
Resource result = asyncPollResponse.getFinalResult().block();
Assertions.assertNotNull(result);
Assertions.assertNotNull(result.id());
Assertions.assertEquals("v1weidxu", result.name());
Assertions.assertEquals("Microsoft.KeyVault/vaults", result.type());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Verifies that the poller honors the server-supplied "Retry-After: 1" header:
// with three polls required, total wall-clock polling time must exceed the
// expected lower bound instead of the default 100ms POLLING_DURATION cadence.
// Fix: the @Test annotation was duplicated (not @Repeatable => compile error).
@Test
public void lroRetryAfter() {
    ServerConfigure configure = new ServerConfigure();
    Duration expectedPollingDuration = Duration.ofSeconds(3);
    configure.pollingCountTillSuccess = 3;
    configure.additionalHeaders = new HttpHeaders(new HttpHeader("Retry-After", "1"));
    WireMockServer lroServer = startServer(configure);
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        long nanoTime = System.nanoTime();
        FooWithProvisioningState result = lroFlux
            .doOnNext(response -> {
                System.out.println(String.format("[%s] status %s",
                    OffsetDateTime.now().toString(), response.getStatus().toString()));
            }).blockLast()
            .getFinalResult().block();
        Assertions.assertNotNull(result);
        // Elapsed time must reflect the server-driven Retry-After delays.
        Duration pollingDuration = Duration.ofNanos(System.nanoTime() - nanoTime);
        Assertions.assertTrue(pollingDuration.compareTo(expectedPollingDuration) > 0);
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
// Verifies that a Reactor subscriber-context entry ("key1" -> "value1") set on the
// poller flux is propagated into the HTTP pipeline call context of every request;
// the verifying policy fails the call with an AssertionError when it is missing.
@Test
public void lroContext() {
WireMockServer lroServer = startServer();
// Pipeline policy that rejects any request whose call context lacks key1=value1.
HttpPipelinePolicy contextVerifyPolicy = (context, next) -> {
Optional<Object> valueOpt = context.getData("key1");
if (valueOpt.isPresent() && "value1".equals(valueOpt.get())) {
return next.process();
} else {
return Mono.error(new AssertionError());
}
};
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port(), Collections.singletonList(contextVerifyPolicy)),
SERIALIZER);
Flux<AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState>> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
// Inject the context entry the verifying policy expects on every request.
lroFlux = lroFlux.subscriberContext(context -> context.put("key1", "value1"));
FooWithProvisioningState result = lroFlux
.blockLast()
.getFinalResult()
.block();
Assertions.assertNotNull(result);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Tunables for the mock LRO server used by startServer(...).
private static class ServerConfigure {
// Number of GET polls before the mock reports a terminal "SUCCEEDED" state.
private int pollingCountTillSuccess = 2;
// Extra headers (e.g. Retry-After) attached to in-progress responses.
private HttpHeaders additionalHeaders = HttpHeaders.noHeaders();
}
// Starts a mock LRO server with the default configuration
// (two polls until success, no extra headers).
private static WireMockServer startServer() {
    final ServerConfigure defaults = new ServerConfigure();
    return startServer(defaults);
}
// Starts a WireMock server that simulates a provisioning-state based LRO on
// /resource/1: PUT and early GETs report "IN_PROGRESS" (with any configured extra
// headers), and the poll whose count equals pollingCountTillSuccess reports
// "SUCCEEDED" with a generated resource id.
private static WireMockServer startServer(ServerConfigure serverConfigure) {
final String resourceEndpoint = "/resource/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
// Number of GET polls seen so far; single-element array to allow mutation
// from within the anonymous class.
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
// Reject anything other than the single known endpoint.
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
// Initial PUT: operation accepted but still in progress.
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
System.out.println(String.format("[%s] PUT status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "SUCCEEDED"));
// Terminal state: succeeded, with a generated resource id.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("SUCCEEDED", UUID.randomUUID().toString())))
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
return lroServer;
}
// Builds (but does not start) a WireMock server on a dynamic port, wiring in the
// given transformer as an extension and stubbing every supplied endpoint with a
// catch-all response so the transformer sees all traffic.
private static WireMockServer createServer(ResponseTransformer transformer,
                                           String... endpoints) {
    WireMockConfiguration configuration = WireMockConfiguration.options()
        .dynamicPort()
        .extensions(transformer)
        .disableRequestJournal();
    WireMockServer server = new WireMockServer(configuration);
    for (int i = 0; i < endpoints.length; i++) {
        String endpoint = endpoints[i];
        server.stubFor(
            WireMock.any(WireMock.urlEqualTo(endpoint)).willReturn(WireMock.aResponse()));
    }
    return server;
}
// Convenience overload: a pipeline containing only the port-rewriting policy.
private static HttpPipeline createHttpPipeline(int port) {
    List<HttpPipelinePolicy> noExtraPolicies = Collections.emptyList();
    return createHttpPipeline(port, noExtraPolicies);
}
// Builds an HttpPipeline that appends, after any caller-supplied policies, a policy
// rewriting every request URL's port to the WireMock server's dynamic port.
private static HttpPipeline createHttpPipeline(int port, List<HttpPipelinePolicy> additionalPolicies) {
    List<HttpPipelinePolicy> policies = new ArrayList<>(additionalPolicies);
    // Lambda form of the original anonymous HttpPipelinePolicy: redirect the
    // request to the mock server's port, preserving protocol/host/file.
    policies.add((context, next) -> {
        HttpRequest request = context.getHttpRequest();
        URL url = request.getUrl();
        try {
            request.setUrl(new URL(url.getProtocol(), url.getHost(), port, url.getFile()));
        } catch (MalformedURLException mue) {
            throw new RuntimeException(mue);
        }
        context.setHttpRequest(request);
        return next.process();
    });
    HttpPipelinePolicy[] policyArray = policies.toArray(new HttpPipelinePolicy[0]);
    return new HttpPipelineBuilder()
        .policies(policyArray)
        .build();
}
// Initiates the LRO: performs the PUT through the Reactor subscriber context so
// pipeline policies can observe context values, keeping only the single response.
private Mono<Response<Flux<ByteBuffer>>> newLroInitFunction(ProvisioningStateLroServiceClient client) {
return FluxUtil.fluxContext(context -> client.startLro(context).flux()).next();
}
/**
 * JSON-serializes {@code object} with the shared adapter; any {@link IOException}
 * is rethrown unchecked so this helper can be used inside lambdas.
 */
private static String toJson(Object object) {
    String json;
    try {
        json = SERIALIZER.serialize(object, SerializerEncoding.JSON);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
    return json;
}
} | class LROPollerTests {
private static final SerializerAdapter SERIALIZER = new AzureJacksonAdapter();
private static final Duration POLLING_DURATION = Duration.ofMillis(100);
// Initializes Mockito-annotated mocks before every test.
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
// Clears Mockito inline mocks after every test to avoid cross-test leakage.
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Host("http:
@ServiceInterface(name = "ProvisioningStateLroService")
interface ProvisioningStateLroServiceClient {
@Put("/resource/1")
Mono<Response<Flux<ByteBuffer>>> startLro(Context context);
}
// End-to-end check of an LRO tracked via the resource's provisioningState field:
// the first emission must be IN_PROGRESS with no resource id yet, the second
// SUCCESSFULLY_COMPLETED with a resource id assigned; any further emission fails.
@Test
public void lroBasedOnProvisioningState() {
WireMockServer lroServer = startServer();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
// single-element array: a mutable counter capturable by the lambda below
int[] onNextCallCount = new int[1];
lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
Assertions.assertNull(pollResult.getValue().getResourceId());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
Assertions.assertNotNull(pollResult.getValue().getResourceId());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
} finally {
// always stop the mock server, even when an assertion above fails
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperation() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Succeeded\"}")
.build();
}
} else if (request.getUrl().endsWith(resourceEndpoint) && getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("Succeeded", UUID.randomUUID().toString())))
.build();
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
FooWithProvisioningState foo = pollResponse.getFinalResult().block();
Assertions.assertNotNull(foo.getResourceId());
Assertions.assertEquals("Succeeded", foo.getProvisioningState());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperationFailed() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Failed\"}")
.build();
}
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertNotNull(pollResult.getValue());
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.FAILED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
Assertions.assertEquals(LongRunningOperationStatus.FAILED, pollResponse.getStatus());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroSucceededNoPoll() {
final String resourceEndpoint = "/resource/1";
final String sampleVaultUpdateSucceededResponse = "{\"id\":\"/subscriptions/
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(200)
.body(sampleVaultUpdateSucceededResponse)
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<Resource>, Resource> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
Resource.class,
Resource.class,
POLLING_DURATION,
newLroInitFunction(client));
StepVerifier.create(lroFlux)
.expectSubscription()
.expectNextMatches(response -> {
PollResult<Resource> pollResult = response.getValue();
return response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED
&& pollResult != null
&& pollResult.getValue() != null
&& pollResult.getValue().id() != null;
}).verifyComplete();
AsyncPollResponse<PollResult<Resource>, Resource> asyncPollResponse = lroFlux.blockLast();
Assertions.assertNotNull(asyncPollResponse);
Resource result = asyncPollResponse.getFinalResult().block();
Assertions.assertNotNull(result);
Assertions.assertNotNull(result.id());
Assertions.assertEquals("v1weidxu", result.name());
Assertions.assertEquals("Microsoft.KeyVault/vaults", result.type());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
/**
 * Verifies the poller honors the server's Retry-After header: with Retry-After=1s
 * and three polls required before success, total polling time must exceed 3 seconds
 * even though the configured poll interval is only 100 ms.
 *
 * Fix: the original carried a duplicate {@code @Test} annotation, which does not
 * compile (JUnit 5's {@code @Test} is not repeatable); one copy removed.
 */
@Test
public void lroRetryAfter() {
    ServerConfigure configure = new ServerConfigure();
    Duration expectedPollingDuration = Duration.ofSeconds(3);
    configure.pollingCountTillSuccess = 3;
    configure.additionalHeaders = new HttpHeaders(new HttpHeader("Retry-After", "1"));
    WireMockServer lroServer = startServer(configure);
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        long nanoTime = System.nanoTime();
        FooWithProvisioningState result = lroFlux
            .doOnNext(response -> {
                System.out.println(String.format("[%s] status %s",
                    OffsetDateTime.now().toString(), response.getStatus().toString()));
            }).blockLast()
            .getFinalResult().block();
        Assertions.assertNotNull(result);
        Duration pollingDuration = Duration.ofNanos(System.nanoTime() - nanoTime);
        // strict '>': three 1-second Retry-After waits push past the 3-second floor
        Assertions.assertTrue(pollingDuration.compareTo(expectedPollingDuration) > 0);
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
// Verifies that a value placed in the Reactor subscriber context ("key1"="value1")
// is visible to every pipeline policy during both the initial call and polling;
// the verifying policy fails any request that cannot see the value.
@Test
public void lroContext() {
WireMockServer lroServer = startServer();
HttpPipelinePolicy contextVerifyPolicy = (context, next) -> {
Optional<Object> valueOpt = context.getData("key1");
if (valueOpt.isPresent() && "value1".equals(valueOpt.get())) {
return next.process();
} else {
return Mono.error(new AssertionError());
}
};
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port(), Collections.singletonList(contextVerifyPolicy)),
SERIALIZER);
Flux<AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState>> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
// inject the context entry the verifying policy looks for
lroFlux = lroFlux.subscriberContext(context -> context.put("key1", "value1"));
FooWithProvisioningState result = lroFlux
.blockLast()
.getFinalResult()
.block();
Assertions.assertNotNull(result);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Tunable knobs for the mock LRO server used by these tests.
private static class ServerConfigure {
// Number of GET polls the server requires before reporting a terminal success.
private int pollingCountTillSuccess = 2;
// Extra headers (e.g. Retry-After) attached to in-progress responses.
private HttpHeaders additionalHeaders = HttpHeaders.noHeaders();
}
/** Starts a mock LRO server with the default configuration. */
private static WireMockServer startServer() {
    ServerConfigure defaultConfigure = new ServerConfigure();
    return startServer(defaultConfigure);
}
// Starts a WireMock server simulating a provisioningState-based LRO on /resource/1:
// PUT answers IN_PROGRESS; each GET increments a counter and answers IN_PROGRESS
// until pollingCountTillSuccess is reached, then SUCCEEDED with a fresh resource id.
private static WireMockServer startServer(ServerConfigure serverConfigure) {
final String resourceEndpoint = "/resource/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
// single-element array: mutable GET counter shared across transform() calls
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
System.out.println(String.format("[%s] PUT status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "SUCCEEDED"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("SUCCEEDED", UUID.randomUUID().toString())))
.build();
}
}
// GETs beyond the success count fall through to the stub's default response
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
return lroServer;
}
// Builds (but does not start) a WireMock server on a dynamic port whose listed
// endpoints are all handled by the given transformer.
private static WireMockServer createServer(ResponseTransformer transformer,
String... endpoints) {
WireMockServer server = new WireMockServer(WireMockConfiguration
.options()
.dynamicPort()
.extensions(transformer)
// journal disabled: tests poll many times and never inspect request history
.disableRequestJournal());
for (String endpoint : endpoints) {
server.stubFor(WireMock.any(WireMock.urlEqualTo(endpoint))
.willReturn(WireMock.aResponse()));
}
return server;
}
/** Convenience overload: pipeline with only the port-rewriting policy. */
private static HttpPipeline createHttpPipeline(int port) {
    return createHttpPipeline(port, Collections.<HttpPipelinePolicy>emptyList());
}
// Builds a pipeline whose final policy rewrites every outgoing request's port to
// the mock server's dynamically assigned port (host/path/query are preserved).
private static HttpPipeline createHttpPipeline(int port, List<HttpPipelinePolicy> additionalPolicies) {
List<HttpPipelinePolicy> policies = new ArrayList<>(additionalPolicies);
policies.add(new HttpPipelinePolicy() {
@Override
public Mono<HttpResponse> process(HttpPipelineCallContext context,
HttpPipelineNextPolicy next) {
HttpRequest request = context.getHttpRequest();
request.setUrl(updatePort(request.getUrl(), port));
context.setHttpRequest(request);
return next.process();
}
// Rebuilds the URL with the given port; getFile() carries both path and query.
private URL updatePort(URL url, int port) {
try {
return new URL(url.getProtocol(), url.getHost(), port, url.getFile());
} catch (MalformedURLException mue) {
throw new RuntimeException(mue);
}
}
});
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
}
// Starts the LRO by issuing the PUT through the Reactor subscriber context
// (so policies can read context values) and keeping only the first response.
private Mono<Response<Flux<ByteBuffer>>> newLroInitFunction(ProvisioningStateLroServiceClient client) {
return FluxUtil.fluxContext(context -> client.startLro(context).flux()).next();
}
// Serializes the object to JSON with the shared adapter; the checked IOException
// is wrapped as an unchecked RuntimeException so callers can use this in lambdas.
private static String toJson(Object object) {
try {
return SERIALIZER.serialize(object, SerializerEncoding.JSON);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
} | |
Maybe a `Mono.error` would be more proper? Generally we would prefer an error to surface early rather than late. | public static Mono<byte[]> downloadFileAsync(String url, HttpPipeline httpPipeline) {
FileService service = RestProxy.create(FileService.class, httpPipeline);
try {
    return service.download(getHost(url), getPathAndQuery(url))
        .flatMap(response -> FluxUtil.collectBytesInByteBufferStream(response.getValue()));
} catch (MalformedURLException ex) {
    // Fix: propagate the malformed-URL failure to the subscriber. Returning
    // Mono.empty() silently completed without data, hiding the error from callers.
    return Mono.error(ex);
}
} | return Mono.empty(); | public static Mono<byte[]> downloadFileAsync(String url, HttpPipeline httpPipeline) {
FileService service = RestProxy.create(FileService.class, httpPipeline);
try {
return service.download(getHost(url), getPathAndQuery(url))
.flatMap(response -> FluxUtil.collectBytesInByteBufferStream(response.getValue()));
} catch (MalformedURLException ex) {
// Fail the Mono (lazily, via supplier) so subscribers observe the URL parse
// error instead of an empty completion.
return Mono.error(() -> ex);
}
} | class Utils {
private Utils() {
}
/**
 * Unboxes a {@link Boolean}, treating {@code null} as {@code false}.
 *
 * @param value the Boolean value, possibly null
 * @return {@code true} only when {@code value} is non-null and true
 */
public static boolean toPrimitiveBoolean(Boolean value) {
    return value != null && value.booleanValue();
}
/**
 * Unboxes an {@link Integer}, mapping {@code null} to {@code 0}.
 *
 * @param value the Integer value, possibly null
 * @return 0 if the given Integer value is null, otherwise its int value
 */
public static int toPrimitiveInt(Integer value) {
    return value == null ? 0 : value.intValue();
}
/**
 * Unboxes a {@link Long} into an {@code int}, mapping {@code null} to {@code 0}.
 *
 * @param value the Long value, possibly null
 * @return 0 when null, otherwise the exact int value; like the previous
 *         implementation this throws {@link ArithmeticException} on overflow
 */
public static int toPrimitiveInt(Long value) {
    if (value == null) {
        return 0;
    }
    return Math.toIntExact(value.longValue());
}
/**
 * Unboxes a {@link Long}, mapping {@code null} to {@code 0}.
 *
 * @param value the Long value, possibly null
 * @return 0 when {@code value} is null, otherwise its long value
 */
public static long toPrimitiveLong(Long value) {
    return value == null ? 0L : value.longValue();
}
/**
 * Creates an OData filter string for filtering list results by tag.
 *
 * @param tagName the name of the tag; when null, no filter is produced
 * @param tagValue the value of the tag; when null, only the tag name is filtered
 * @return the OData filter to pass into list methods, or null when no tag name given
 */
public static String createOdataFilterForTags(String tagName, String tagValue) {
    if (tagName == null) {
        return null;
    }
    if (tagValue == null) {
        return String.format("tagname eq '%s'", tagName);
    }
    return String.format("tagname eq '%s' and tagvalue eq '%s'", tagName, tagValue);
}
/**
 * Gets a Mono of type {@code U}, where U extends {@link Indexable}, that emits only the root
 * resource from a given Mono of {@link Indexable}.
 *
 * @param stream the input Mono of {@link Indexable}
 * @param <U> the specialized type of last item in the input stream
 * @return a Mono that emits last item
 */
// Unchecked cast is deliberate: the caller asserts the concrete Indexable subtype it expects.
@SuppressWarnings("unchecked")
public static <U extends Indexable> Mono<U> rootResource(Mono<Indexable> stream) {
return stream.map(indexable -> (U) indexable);
}
/**
* Download a file asynchronously.
*
* @param url the URL pointing to the file
* @param httpPipeline the http pipeline
* @return an Observable pointing to the content of the file
*/
/**
* Get host from url.
*
* @param urlString the url string
* @return the host
* @throws MalformedURLException when url is invalid format
*/
public static String getHost(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String protocol = url.getProtocol();
String host = url.getAuthority();
return protocol + ":
}
/**
 * Returns the path portion of a URL, with the query string appended
 * (separated by '?') when one is present.
 *
 * @param urlString the url string
 * @return the path, optionally followed by "?" and the query
 * @throws MalformedURLException when the url is invalid format
 */
public static String getPathAndQuery(String urlString) throws MalformedURLException {
    URL url = new URL(urlString);
    StringBuilder result = new StringBuilder(url.getPath());
    String query = url.getQuery();
    if (query != null && !query.isEmpty()) {
        result.append('?').append(query);
    }
    return result.toString();
}
/**
 * Appends {@code value} to {@code list} unless an equal entry
 * (ignoring case) is already present.
 *
 * @param list the list to mutate
 * @param value value to add if not exists in the list
 */
public static void addToListIfNotExists(List<String> list, String value) {
    for (String item : list) {
        if (item.equalsIgnoreCase(value)) {
            return; // already present (case-insensitively); nothing to do
        }
    }
    list.add(value);
}
/**
 * Removes the first entry of {@code list} that equals {@code value}
 * ignoring case; no-op when no entry matches.
 *
 * @param list the list to mutate
 * @param value value to remove
 */
public static void removeFromList(List<String> list, String value) {
    for (int i = 0; i < list.size(); i++) {
        if (list.get(i).equalsIgnoreCase(value)) {
            list.remove(i);
            return; // only the first match is removed, matching previous behavior
        }
    }
}
/**
* @param id resource id
* @return resource group id for the resource id provided
*/
public static String resourceGroupId(String id) {
final ResourceId resourceId = ResourceId.fromString(id);
return String.format("/subscriptions/%s/resourceGroups/%s",
resourceId.subscriptionId(),
resourceId.resourceGroupName());
}
/**
* A Retrofit service used to download a file.
*/
@Host("{$host}")
@ServiceInterface(name = "FileService")
private interface FileService {
@Get("{path}")
Mono<SimpleResponse<Flux<ByteBuffer>>> download(
@HostParam("$host") String host, @PathParam(value = "path", encoded = true) String path);
}
/**
* Gets the only subscription as the default one in the tenant if applicable.
*
* @param subscriptions the list of subscriptions
* @throws IllegalStateException when no subscription or more than one subscription found
* @return the only subscription existing in the tenant
*/
public static String defaultSubscription(PagedIterable<Subscription> subscriptions) {
List<Subscription> subscriptionList = new ArrayList<>();
subscriptions.forEach(subscription -> {
subscriptionList.add(subscription);
});
if (subscriptionList.size() == 0) {
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptionList.size() > 1) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.");
subscriptionList.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException(stringBuilder.toString()));
}
return subscriptionList.get(0).subscriptionId();
}
/**
* Generates default scope for oauth2 from the specific request
* @param request a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromRequest(HttpRequest request, AzureEnvironment environment) {
return getDefaultScopeFromUrl(request.getUrl().toString().toLowerCase(Locale.ROOT), environment);
}
/**
* Generates default scope for oauth2 from the specific request
* @param url the url in lower case of a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromUrl(String url, AzureEnvironment environment) {
String resource = environment.getManagementEndpoint();
for (Map.Entry<String, String> endpoint : environment.endpoints().entrySet()) {
if (url.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(AzureEnvironment.Endpoint.KEYVAULT.identifier())) {
resource = String.format("https:
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.GRAPH.identifier())) {
resource = environment.getGraphEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment.getLogAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment.getApplicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment.getDataLakeEndpointResourceId();
break;
}
}
}
return removeTrailingSlash(resource) + "/.default";
}
/**
 * Removes a single trailing slash from the string, if present.
 *
 * @param s the string, possibly null or empty
 * @return the string without one trailing slash; null/empty input returned unchanged
 */
public static String removeTrailingSlash(String s) {
    if (s == null || s.length() == 0) {
        return s;
    }
    return s.endsWith("/") ? s.substring(0, s.length() - 1) : s;
}
/**
 * Get the Azure storage account connection string.
 * @param accountName storage account name
 * @param accountKey storage account key
 * @param environment the Azure environment; falls back to public Azure when null
 *                    or when it carries no storage endpoint suffix
 * @return the storage account connection string.
 */
public static String getStorageConnectionString(String accountName, String accountKey,
AzureEnvironment environment) {
if (environment == null || environment.getStorageEndpointSuffix() == null) {
environment = AzureEnvironment.AZURE;
}
// Strip leading dots from the suffix — presumably suffixes may be configured
// with a leading '.' (e.g. ".core.windows.net"); TODO confirm the source format.
String suffix = environment.getStorageEndpointSuffix().replaceAll("^\\.*", "");
return String.format("DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s;EndpointSuffix=%s",
accountName, accountKey, suffix);
}
} | class Utils {
private Utils() {
}
/**
 * Unboxes a {@link Boolean}, treating {@code null} as {@code false}.
 *
 * @param value the Boolean value, possibly null
 * @return true only for a non-null {@code Boolean.TRUE}
 */
public static boolean toPrimitiveBoolean(Boolean value) {
    return Boolean.TRUE.equals(value);
}
/**
* Converts an object Integer to a primitive int.
*
* @param value the Integer value
* @return 0 if the given Integer value is null else integer value
*/
public static int toPrimitiveInt(Integer value) {
if (value == null) {
return 0;
}
return value.intValue();
}
/**
* Converts an object Long to a primitive int.
*
* @param value the Long value
* @return 0 if the given Long value is null else integer value
*/
public static int toPrimitiveInt(Long value) {
if (value == null) {
return 0;
}
return Math.toIntExact(value);
}
/**
* Converts an object Long to a primitive long.
*
* @param value the Long value
* @return 0 if the given Long value is null else long value
*/
public static long toPrimitiveLong(Long value) {
if (value == null) {
return 0;
}
return value;
}
/**
 * Builds an OData filter expression for tag-based list filtering.
 *
 * @param tagName the name of the tag; when null, all resources will be returned
 * @param tagValue the value of the tag; when null, only tag name will be filtered
 * @return the OData filter to pass into list methods, or null when tagName is null
 */
public static String createOdataFilterForTags(String tagName, String tagValue) {
    if (tagName == null) {
        return null;
    }
    return tagValue == null
        ? String.format("tagname eq '%s'", tagName)
        : String.format("tagname eq '%s' and tagvalue eq '%s'", tagName, tagValue);
}
/**
* Gets a Mono of type {@code U}, where U extends {@link Indexable}, that emits only the root
* resource from a given Mono of {@link Indexable}.
*
* @param stream the input Mono of {@link Indexable}
* @param <U> the specialized type of last item in the input stream
* @return a Mono that emits last item
*/
@SuppressWarnings("unchecked")
public static <U extends Indexable> Mono<U> rootResource(Mono<Indexable> stream) {
return stream.map(indexable -> (U) indexable);
}
/**
* Download a file asynchronously.
*
* @param url the URL pointing to the file
* @param httpPipeline the http pipeline
* @return an Observable pointing to the content of the file
*/
/**
* Get host from url.
*
* @param urlString the url string
* @return the host
* @throws MalformedURLException when url is invalid format
*/
public static String getHost(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String protocol = url.getProtocol();
String host = url.getAuthority();
return protocol + ":
}
/**
* Get path from url.
*
* @param urlString the url string
* @return the path
* @throws MalformedURLException when the url is invalid format
*/
public static String getPathAndQuery(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String path = url.getPath();
String query = url.getQuery();
if (query != null && !query.isEmpty()) {
path = path + "?" + query;
}
return path;
}
/**
* Adds a value to the list if does not already exists.
*
* @param list the list
* @param value value to add if not exists in the list
*/
public static void addToListIfNotExists(List<String> list, String value) {
boolean found = false;
for (String item : list) {
if (item.equalsIgnoreCase(value)) {
found = true;
break;
}
}
if (!found) {
list.add(value);
}
}
/**
* Removes a value from the list.
*
* @param list the list
* @param value value to remove
*/
public static void removeFromList(List<String> list, String value) {
int foundIndex = -1;
int i = 0;
for (String id : list) {
if (id.equalsIgnoreCase(value)) {
foundIndex = i;
break;
}
i++;
}
if (foundIndex != -1) {
list.remove(foundIndex);
}
}
/**
* @param id resource id
* @return resource group id for the resource id provided
*/
public static String resourceGroupId(String id) {
final ResourceId resourceId = ResourceId.fromString(id);
return String.format("/subscriptions/%s/resourceGroups/%s",
resourceId.subscriptionId(),
resourceId.resourceGroupName());
}
/**
* A Retrofit service used to download a file.
*/
@Host("{$host}")
@ServiceInterface(name = "FileService")
private interface FileService {
@Get("{path}")
Mono<SimpleResponse<Flux<ByteBuffer>>> download(
@HostParam("$host") String host, @PathParam(value = "path", encoded = true) String path);
}
/**
* Gets the only subscription as the default one in the tenant if applicable.
*
* @param subscriptions the list of subscriptions
* @throws IllegalStateException when no subscription or more than one subscription found
* @return the only subscription existing in the tenant
*/
public static String defaultSubscription(PagedIterable<Subscription> subscriptions) {
// Materialize the paged result so it can be sized and indexed.
List<Subscription> subscriptionList = new ArrayList<>();
subscriptions.forEach(subscription -> {
subscriptionList.add(subscription);
});
if (subscriptionList.size() == 0) {
// No subscription at all: resource management cannot proceed.
// NOTE(review): the message literal below appears truncated in this
// extract (URL cut off) — confirm against the original source.
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptionList.size() > 1) {
// Ambiguous: list every candidate (name : id) so the user can pick one.
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.")
subscriptionList.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException(stringBuilder.toString()));
}
// Exactly one subscription: use it as the default.
return subscriptionList.get(0).subscriptionId();
}
/**
* Generates default scope for oauth2 from the specific request
* @param request a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromRequest(HttpRequest request, AzureEnvironment environment) {
// Lower-case the URL up front so endpoint matching in
// getDefaultScopeFromUrl is effectively case-insensitive.
return getDefaultScopeFromUrl(request.getUrl().toString().toLowerCase(Locale.ROOT), environment);
}
/**
* Generates default scope for oauth2 from the specific request
* @param url the url in lower case of a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromUrl(String url, AzureEnvironment environment) {
// Default to the ARM management endpoint; a more specific resource is
// chosen below when the URL matches a known endpoint of the environment.
String resource = environment.getManagementEndpoint();
for (Map.Entry<String, String> endpoint : environment.endpoints().entrySet()) {
if (url.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(AzureEnvironment.Endpoint.KEYVAULT.identifier())) {
// NOTE(review): this format literal appears truncated in this
// extract — confirm the Key Vault resource URL in the original.
resource = String.format("https:
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.GRAPH.identifier())) {
resource = environment.getGraphEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment.getLogAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment.getApplicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
// Both Data Lake endpoints share one resource id.
resource = environment.getDataLakeEndpointResourceId();
break;
}
}
}
// OAuth2 default scope = resource id (no trailing slash) + "/.default".
return removeTrailingSlash(resource) + "/.default";
}
/**
* Removes the trailing slash of the string.
* @param s the string
* @return the string without trailing slash
*/
/**
 * Removes a single trailing slash from {@code s}, if present.
 *
 * @param s the string; may be {@code null} or empty
 * @return the string without its trailing slash, or {@code s} unchanged
 *     (including {@code null}) when there is nothing to strip
 */
public static String removeTrailingSlash(String s) {
    if (s == null || s.isEmpty()) {
        return s;
    }
    // Only the last character is examined: "a//" becomes "a/".
    return s.endsWith("/") ? s.substring(0, s.length() - 1) : s;
}
/**
* Get the Azure storage account connection string.
* @param accountName storage account name
* @param accountKey storage account key
* @param environment the Azure environment
* @return the storage account connection string.
*/
/**
 * Builds an Azure Storage connection string for the given account,
 * falling back to the public Azure cloud when the environment (or its
 * storage endpoint suffix) is not supplied.
 *
 * @param accountName storage account name
 * @param accountKey storage account key
 * @param environment the Azure environment; may be {@code null}
 * @return the storage account connection string
 */
public static String getStorageConnectionString(String accountName, String accountKey,
    AzureEnvironment environment) {
    AzureEnvironment effective =
        (environment == null || environment.getStorageEndpointSuffix() == null)
            ? AzureEnvironment.AZURE
            : environment;
    // Endpoint suffixes are sometimes stored with leading dots; drop them.
    String suffix = effective.getStorageEndpointSuffix().replaceAll("^\\.*", "");
    return "DefaultEndpointsProtocol=https;AccountName=" + accountName
        + ";AccountKey=" + accountKey + ";EndpointSuffix=" + suffix;
}
} |
I think it should be `getRawPath` and `getRawQuery`, since you are just doing string concatenation here. | public static String getPathAndQuery(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String path = url.getPath();
String query = url.getQuery();
if (query != null && !query.isEmpty()) {
path = path + "?" + query;
}
return path;
} | String query = url.getQuery(); | public static String getPathAndQuery(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String path = url.getPath();
String query = url.getQuery();
if (query != null && !query.isEmpty()) {
path = path + "?" + query;
}
return path;
} | class Utils {
private Utils() {
}
/**
* Converts an object Boolean to a primitive boolean.
*
* @param value the Boolean value
* @return false if the given Boolean value is null or false else true
*/
/**
 * Null-safe unboxing of a {@link Boolean}.
 *
 * @param value the Boolean value
 * @return {@code true} only when {@code value} is non-null and true
 */
public static boolean toPrimitiveBoolean(Boolean value) {
    return value != null && value.booleanValue();
}
/**
* Converts an object Integer to a primitive int.
*
* @param value the Integer value
* @return 0 if the given Integer value is null else integer value
*/
/**
 * Null-safe unboxing of an {@link Integer}.
 *
 * @param value the Integer value
 * @return 0 if the given Integer value is null else integer value
 */
public static int toPrimitiveInt(Integer value) {
    return value == null ? 0 : value.intValue();
}
/**
* Converts an object Long to a primitive int.
*
* @param value the Long value
* @return 0 if the given Long value is null else integer value
*/
/**
 * Null-safe narrowing of a {@link Long} to {@code int}.
 *
 * @param value the Long value
 * @return 0 if the given Long value is null else integer value
 */
public static int toPrimitiveInt(Long value) {
    if (value == null) {
        return 0;
    }
    // Math.toIntExact throws ArithmeticException on overflow rather than
    // silently truncating, matching the original behavior.
    return Math.toIntExact(value.longValue());
}
/**
* Converts an object Long to a primitive long.
*
* @param value the Long value
* @return 0 if the given Long value is null else long value
*/
/**
 * Null-safe unboxing of a {@link Long}.
 *
 * @param value the Long value
 * @return 0 if the given Long value is null else long value
 */
public static long toPrimitiveLong(Long value) {
    return value == null ? 0L : value.longValue();
}
/**
* Creates an Odata filter string that can be used for filtering list results by tags.
*
* @param tagName the name of the tag. If not provided, all resources will be returned.
* @param tagValue the value of the tag. If not provided, only tag name will be filtered.
* @return the Odata filter to pass into list methods
*/
/**
 * Creates an OData filter string for filtering list results by tag.
 *
 * @param tagName the name of the tag. If not provided, all resources will be returned.
 * @param tagValue the value of the tag. If not provided, only tag name will be filtered.
 * @return the OData filter to pass into list methods, or {@code null}
 *     when no tag name was given
 */
public static String createOdataFilterForTags(String tagName, String tagValue) {
    if (tagName == null) {
        return null; // no filtering requested
    }
    StringBuilder filter = new StringBuilder()
        .append("tagname eq '").append(tagName).append('\'');
    if (tagValue != null) {
        filter.append(" and tagvalue eq '").append(tagValue).append('\'');
    }
    return filter.toString();
}
/**
* Gets a Mono of type {@code U}, where U extends {@link Indexable}, that emits only the root
* resource from a given Mono of {@link Indexable}.
*
* @param stream the input Mono of {@link Indexable}
* @param <U> the specialized type of last item in the input stream
* @return a Mono that emits last item
*/
// The cast is unchecked (erased at runtime); callers must guarantee the
// stream's terminal item really is a U, or a ClassCastException surfaces
// later at the use site.
@SuppressWarnings("unchecked")
public static <U extends Indexable> Mono<U> rootResource(Mono<Indexable> stream) {
return stream.map(indexable -> (U) indexable);
}
/**
* Download a file asynchronously.
*
* @param url the URL pointing to the file
* @param httpPipeline the http pipeline
* @return an Observable pointing to the content of the file
*/
/**
 * Download a file asynchronously.
 *
 * @param url the URL pointing to the file
 * @param httpPipeline the http pipeline
 * @return a Mono emitting the complete file content as a byte array,
 *     or an error Mono when the URL is malformed
 */
public static Mono<byte[]> downloadFileAsync(String url, HttpPipeline httpPipeline) {
    FileService service = RestProxy.create(FileService.class, httpPipeline);
    try {
        return service.download(getHost(url), getPathAndQuery(url))
            .flatMap(response -> FluxUtil.collectBytesInByteBufferStream(response.getValue()));
    } catch (MalformedURLException ex) {
        // Surface the bad URL to the subscriber instead of silently
        // completing empty, which hid the failure from callers.
        return Mono.error(() -> ex);
    }
}
/**
* Get host from url.
*
* @param urlString the url string
* @return the host
* @throws MalformedURLException when url is invalid format
*/
public static String getHost(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String protocol = url.getProtocol();
// getAuthority() includes the port when one is present.
String host = url.getAuthority();
// NOTE(review): the return literal below appears truncated in this
// extract (presumably protocol + "://" + host) — confirm against the
// original source.
return protocol + ":
}
/**
* Get path from url.
*
* @param urlString the url string
* @return the path
* @throws MalformedURLException when the url is invalid format
*/
/**
* Adds a value to the list if does not already exists.
*
* @param list the list
* @param value value to add if not exists in the list
*/
public static void addToListIfNotExists(List<String> list, String value) {
boolean found = false;
for (String item : list) {
if (item.equalsIgnoreCase(value)) {
found = true;
break;
}
}
if (!found) {
list.add(value);
}
}
/**
* Removes a value from the list.
*
* @param list the list
* @param value value to remove
*/
public static void removeFromList(List<String> list, String value) {
int foundIndex = -1;
int i = 0;
for (String id : list) {
if (id.equalsIgnoreCase(value)) {
foundIndex = i;
break;
}
i++;
}
if (foundIndex != -1) {
list.remove(foundIndex);
}
}
/**
* @param id resource id
* @return resource group id for the resource id provided
*/
public static String resourceGroupId(String id) {
final ResourceId resourceId = ResourceId.fromString(id);
return String.format("/subscriptions/%s/resourceGroups/%s",
resourceId.subscriptionId(),
resourceId.resourceGroupName());
}
/**
* A Retrofit service used to download a file.
*/
@Host("{$host}")
@ServiceInterface(name = "FileService")
private interface FileService {
@Get("{path}")
Mono<SimpleResponse<Flux<ByteBuffer>>> download(
@HostParam("$host") String host, @PathParam(value = "path", encoded = true) String path);
}
/**
* Gets the only subscription as the default one in the tenant if applicable.
*
* @param subscriptions the list of subscriptions
* @throws IllegalStateException when no subscription or more than one subscription found
* @return the only subscription existing in the tenant
*/
public static String defaultSubscription(PagedIterable<Subscription> subscriptions) {
List<Subscription> subscriptionList = new ArrayList<>();
subscriptions.forEach(subscription -> {
subscriptionList.add(subscription);
});
if (subscriptionList.size() == 0) {
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptionList.size() > 1) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.");
subscriptionList.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException(stringBuilder.toString()));
}
return subscriptionList.get(0).subscriptionId();
}
/**
* Generates default scope for oauth2 from the specific request
* @param request a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromRequest(HttpRequest request, AzureEnvironment environment) {
return getDefaultScopeFromUrl(request.getUrl().toString().toLowerCase(Locale.ROOT), environment);
}
/**
* Generates default scope for oauth2 from the specific request
* @param url the url in lower case of a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromUrl(String url, AzureEnvironment environment) {
String resource = environment.getManagementEndpoint();
for (Map.Entry<String, String> endpoint : environment.endpoints().entrySet()) {
if (url.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(AzureEnvironment.Endpoint.KEYVAULT.identifier())) {
resource = String.format("https:
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.GRAPH.identifier())) {
resource = environment.getGraphEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment.getLogAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment.getApplicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment.getDataLakeEndpointResourceId();
break;
}
}
}
return removeTrailingSlash(resource) + "/.default";
}
/**
* Removes the trailing slash of the string.
* @param s the string
* @return the string without trailing slash
*/
public static String removeTrailingSlash(String s) {
if (s == null || s.length() == 0) {
return s;
}
if (s.endsWith("/")) {
return s.substring(0, s.length() - 1);
}
return s;
}
/**
* Get the Azure storage account connection string.
* @param accountName storage account name
* @param accountKey storage account key
* @param environment the Azure environment
* @return the storage account connection string.
*/
public static String getStorageConnectionString(String accountName, String accountKey,
AzureEnvironment environment) {
if (environment == null || environment.getStorageEndpointSuffix() == null) {
environment = AzureEnvironment.AZURE;
}
String suffix = environment.getStorageEndpointSuffix().replaceAll("^\\.*", "");
return String.format("DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s;EndpointSuffix=%s",
accountName, accountKey, suffix);
}
} | class Utils {
private Utils() {
}
/**
* Converts an object Boolean to a primitive boolean.
*
* @param value the Boolean value
* @return false if the given Boolean value is null or false else true
*/
public static boolean toPrimitiveBoolean(Boolean value) {
if (value == null) {
return false;
}
return value.booleanValue();
}
/**
* Converts an object Integer to a primitive int.
*
* @param value the Integer value
* @return 0 if the given Integer value is null else integer value
*/
public static int toPrimitiveInt(Integer value) {
if (value == null) {
return 0;
}
return value.intValue();
}
/**
* Converts an object Long to a primitive int.
*
* @param value the Long value
* @return 0 if the given Long value is null else integer value
*/
public static int toPrimitiveInt(Long value) {
if (value == null) {
return 0;
}
return Math.toIntExact(value);
}
/**
* Converts an object Long to a primitive long.
*
* @param value the Long value
* @return 0 if the given Long value is null else long value
*/
public static long toPrimitiveLong(Long value) {
if (value == null) {
return 0;
}
return value;
}
/**
* Creates an Odata filter string that can be used for filtering list results by tags.
*
* @param tagName the name of the tag. If not provided, all resources will be returned.
* @param tagValue the value of the tag. If not provided, only tag name will be filtered.
* @return the Odata filter to pass into list methods
*/
public static String createOdataFilterForTags(String tagName, String tagValue) {
if (tagName == null) {
return null;
} else if (tagValue == null) {
return String.format("tagname eq '%s'", tagName);
} else {
return String.format("tagname eq '%s' and tagvalue eq '%s'", tagName, tagValue);
}
}
/**
* Gets a Mono of type {@code U}, where U extends {@link Indexable}, that emits only the root
* resource from a given Mono of {@link Indexable}.
*
* @param stream the input Mono of {@link Indexable}
* @param <U> the specialized type of last item in the input stream
* @return a Mono that emits last item
*/
@SuppressWarnings("unchecked")
public static <U extends Indexable> Mono<U> rootResource(Mono<Indexable> stream) {
return stream.map(indexable -> (U) indexable);
}
/**
* Download a file asynchronously.
*
* @param url the URL pointing to the file
* @param httpPipeline the http pipeline
* @return an Observable pointing to the content of the file
*/
public static Mono<byte[]> downloadFileAsync(String url, HttpPipeline httpPipeline) {
FileService service = RestProxy.create(FileService.class, httpPipeline);
try {
return service.download(getHost(url), getPathAndQuery(url))
.flatMap(response -> FluxUtil.collectBytesInByteBufferStream(response.getValue()));
} catch (MalformedURLException ex) {
return Mono.error(() -> ex);
}
}
/**
* Get host from url.
*
* @param urlString the url string
* @return the host
* @throws MalformedURLException when url is invalid format
*/
public static String getHost(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String protocol = url.getProtocol();
String host = url.getAuthority();
return protocol + ":
}
/**
* Get path from url.
*
* @param urlString the url string
* @return the path
* @throws MalformedURLException when the url is invalid format
*/
/**
* Adds a value to the list if does not already exists.
*
* @param list the list
* @param value value to add if not exists in the list
*/
public static void addToListIfNotExists(List<String> list, String value) {
boolean found = false;
for (String item : list) {
if (item.equalsIgnoreCase(value)) {
found = true;
break;
}
}
if (!found) {
list.add(value);
}
}
/**
* Removes a value from the list.
*
* @param list the list
* @param value value to remove
*/
public static void removeFromList(List<String> list, String value) {
int foundIndex = -1;
int i = 0;
for (String id : list) {
if (id.equalsIgnoreCase(value)) {
foundIndex = i;
break;
}
i++;
}
if (foundIndex != -1) {
list.remove(foundIndex);
}
}
/**
* @param id resource id
* @return resource group id for the resource id provided
*/
public static String resourceGroupId(String id) {
final ResourceId resourceId = ResourceId.fromString(id);
return String.format("/subscriptions/%s/resourceGroups/%s",
resourceId.subscriptionId(),
resourceId.resourceGroupName());
}
/**
* A Retrofit service used to download a file.
*/
@Host("{$host}")
@ServiceInterface(name = "FileService")
private interface FileService {
@Get("{path}")
Mono<SimpleResponse<Flux<ByteBuffer>>> download(
@HostParam("$host") String host, @PathParam(value = "path", encoded = true) String path);
}
/**
* Gets the only subscription as the default one in the tenant if applicable.
*
* @param subscriptions the list of subscriptions
* @throws IllegalStateException when no subscription or more than one subscription found
* @return the only subscription existing in the tenant
*/
public static String defaultSubscription(PagedIterable<Subscription> subscriptions) {
List<Subscription> subscriptionList = new ArrayList<>();
subscriptions.forEach(subscription -> {
subscriptionList.add(subscription);
});
if (subscriptionList.size() == 0) {
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptionList.size() > 1) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.");
subscriptionList.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException(stringBuilder.toString()));
}
return subscriptionList.get(0).subscriptionId();
}
/**
* Generates default scope for oauth2 from the specific request
* @param request a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromRequest(HttpRequest request, AzureEnvironment environment) {
return getDefaultScopeFromUrl(request.getUrl().toString().toLowerCase(Locale.ROOT), environment);
}
/**
* Generates default scope for oauth2 from the specific request
* @param url the url in lower case of a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromUrl(String url, AzureEnvironment environment) {
String resource = environment.getManagementEndpoint();
for (Map.Entry<String, String> endpoint : environment.endpoints().entrySet()) {
if (url.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(AzureEnvironment.Endpoint.KEYVAULT.identifier())) {
resource = String.format("https:
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.GRAPH.identifier())) {
resource = environment.getGraphEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment.getLogAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment.getApplicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment.getDataLakeEndpointResourceId();
break;
}
}
}
return removeTrailingSlash(resource) + "/.default";
}
/**
* Removes the trailing slash of the string.
* @param s the string
* @return the string without trailing slash
*/
public static String removeTrailingSlash(String s) {
if (s == null || s.length() == 0) {
return s;
}
if (s.endsWith("/")) {
return s.substring(0, s.length() - 1);
}
return s;
}
/**
* Get the Azure storage account connection string.
* @param accountName storage account name
* @param accountKey storage account key
* @param environment the Azure environment
* @return the storage account connection string.
*/
public static String getStorageConnectionString(String accountName, String accountKey,
AzureEnvironment environment) {
if (environment == null || environment.getStorageEndpointSuffix() == null) {
environment = AzureEnvironment.AZURE;
}
String suffix = environment.getStorageEndpointSuffix().replaceAll("^\\.*", "");
return String.format("DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s;EndpointSuffix=%s",
accountName, accountKey, suffix);
}
} |
Those accessors are not available on `java.net.URL`; `getRawPath`/`getRawQuery` exist only on `java.net.URI`. | public static String getPathAndQuery(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String path = url.getPath();
String query = url.getQuery();
if (query != null && !query.isEmpty()) {
path = path + "?" + query;
}
return path;
} | String query = url.getQuery(); | public static String getPathAndQuery(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String path = url.getPath();
String query = url.getQuery();
if (query != null && !query.isEmpty()) {
path = path + "?" + query;
}
return path;
} | class Utils {
private Utils() {
}
/**
* Converts an object Boolean to a primitive boolean.
*
* @param value the Boolean value
* @return false if the given Boolean value is null or false else true
*/
public static boolean toPrimitiveBoolean(Boolean value) {
if (value == null) {
return false;
}
return value.booleanValue();
}
/**
* Converts an object Integer to a primitive int.
*
* @param value the Integer value
* @return 0 if the given Integer value is null else integer value
*/
public static int toPrimitiveInt(Integer value) {
if (value == null) {
return 0;
}
return value.intValue();
}
/**
* Converts an object Long to a primitive int.
*
* @param value the Long value
* @return 0 if the given Long value is null else integer value
*/
public static int toPrimitiveInt(Long value) {
if (value == null) {
return 0;
}
return Math.toIntExact(value);
}
/**
* Converts an object Long to a primitive long.
*
* @param value the Long value
* @return 0 if the given Long value is null else long value
*/
public static long toPrimitiveLong(Long value) {
if (value == null) {
return 0;
}
return value;
}
/**
* Creates an Odata filter string that can be used for filtering list results by tags.
*
* @param tagName the name of the tag. If not provided, all resources will be returned.
* @param tagValue the value of the tag. If not provided, only tag name will be filtered.
* @return the Odata filter to pass into list methods
*/
public static String createOdataFilterForTags(String tagName, String tagValue) {
if (tagName == null) {
return null;
} else if (tagValue == null) {
return String.format("tagname eq '%s'", tagName);
} else {
return String.format("tagname eq '%s' and tagvalue eq '%s'", tagName, tagValue);
}
}
/**
* Gets a Mono of type {@code U}, where U extends {@link Indexable}, that emits only the root
* resource from a given Mono of {@link Indexable}.
*
* @param stream the input Mono of {@link Indexable}
* @param <U> the specialized type of last item in the input stream
* @return a Mono that emits last item
*/
@SuppressWarnings("unchecked")
public static <U extends Indexable> Mono<U> rootResource(Mono<Indexable> stream) {
return stream.map(indexable -> (U) indexable);
}
/**
* Download a file asynchronously.
*
* @param url the URL pointing to the file
* @param httpPipeline the http pipeline
* @return an Observable pointing to the content of the file
*/
public static Mono<byte[]> downloadFileAsync(String url, HttpPipeline httpPipeline) {
FileService service = RestProxy.create(FileService.class, httpPipeline);
try {
return service.download(getHost(url), getPathAndQuery(url))
.flatMap(response -> FluxUtil.collectBytesInByteBufferStream(response.getValue()));
} catch (MalformedURLException ex) {
return Mono.empty();
}
}
/**
* Get host from url.
*
* @param urlString the url string
* @return the host
* @throws MalformedURLException when url is invalid format
*/
public static String getHost(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String protocol = url.getProtocol();
String host = url.getAuthority();
return protocol + ":
}
/**
* Get path from url.
*
* @param urlString the url string
* @return the path
* @throws MalformedURLException when the url is invalid format
*/
/**
* Adds a value to the list if does not already exists.
*
* @param list the list
* @param value value to add if not exists in the list
*/
public static void addToListIfNotExists(List<String> list, String value) {
boolean found = false;
for (String item : list) {
if (item.equalsIgnoreCase(value)) {
found = true;
break;
}
}
if (!found) {
list.add(value);
}
}
/**
* Removes a value from the list.
*
* @param list the list
* @param value value to remove
*/
public static void removeFromList(List<String> list, String value) {
int foundIndex = -1;
int i = 0;
for (String id : list) {
if (id.equalsIgnoreCase(value)) {
foundIndex = i;
break;
}
i++;
}
if (foundIndex != -1) {
list.remove(foundIndex);
}
}
/**
* @param id resource id
* @return resource group id for the resource id provided
*/
public static String resourceGroupId(String id) {
final ResourceId resourceId = ResourceId.fromString(id);
return String.format("/subscriptions/%s/resourceGroups/%s",
resourceId.subscriptionId(),
resourceId.resourceGroupName());
}
/**
* A Retrofit service used to download a file.
*/
@Host("{$host}")
@ServiceInterface(name = "FileService")
private interface FileService {
@Get("{path}")
Mono<SimpleResponse<Flux<ByteBuffer>>> download(
@HostParam("$host") String host, @PathParam(value = "path", encoded = true) String path);
}
/**
* Gets the only subscription as the default one in the tenant if applicable.
*
* @param subscriptions the list of subscriptions
* @throws IllegalStateException when no subscription or more than one subscription found
* @return the only subscription existing in the tenant
*/
public static String defaultSubscription(PagedIterable<Subscription> subscriptions) {
List<Subscription> subscriptionList = new ArrayList<>();
subscriptions.forEach(subscription -> {
subscriptionList.add(subscription);
});
if (subscriptionList.size() == 0) {
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptionList.size() > 1) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.");
subscriptionList.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException(stringBuilder.toString()));
}
return subscriptionList.get(0).subscriptionId();
}
/**
* Generates default scope for oauth2 from the specific request
* @param request a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromRequest(HttpRequest request, AzureEnvironment environment) {
return getDefaultScopeFromUrl(request.getUrl().toString().toLowerCase(Locale.ROOT), environment);
}
/**
* Generates default scope for oauth2 from the specific request
* @param url the url in lower case of a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromUrl(String url, AzureEnvironment environment) {
String resource = environment.getManagementEndpoint();
for (Map.Entry<String, String> endpoint : environment.endpoints().entrySet()) {
if (url.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(AzureEnvironment.Endpoint.KEYVAULT.identifier())) {
resource = String.format("https:
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.GRAPH.identifier())) {
resource = environment.getGraphEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment.getLogAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment.getApplicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment.getDataLakeEndpointResourceId();
break;
}
}
}
return removeTrailingSlash(resource) + "/.default";
}
/**
* Removes the trailing slash of the string.
* @param s the string
* @return the string without trailing slash
*/
public static String removeTrailingSlash(String s) {
if (s == null || s.length() == 0) {
return s;
}
if (s.endsWith("/")) {
return s.substring(0, s.length() - 1);
}
return s;
}
/**
* Get the Azure storage account connection string.
* @param accountName storage account name
* @param accountKey storage account key
* @param environment the Azure environment
* @return the storage account connection string.
*/
public static String getStorageConnectionString(String accountName, String accountKey,
AzureEnvironment environment) {
if (environment == null || environment.getStorageEndpointSuffix() == null) {
environment = AzureEnvironment.AZURE;
}
String suffix = environment.getStorageEndpointSuffix().replaceAll("^\\.*", "");
return String.format("DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s;EndpointSuffix=%s",
accountName, accountKey, suffix);
}
} | class Utils {
private Utils() {
}
/**
* Converts an object Boolean to a primitive boolean.
*
* @param value the Boolean value
* @return false if the given Boolean value is null or false else true
*/
public static boolean toPrimitiveBoolean(Boolean value) {
if (value == null) {
return false;
}
return value.booleanValue();
}
/**
* Converts an object Integer to a primitive int.
*
* @param value the Integer value
* @return 0 if the given Integer value is null else integer value
*/
public static int toPrimitiveInt(Integer value) {
if (value == null) {
return 0;
}
return value.intValue();
}
/**
* Converts an object Long to a primitive int.
*
* @param value the Long value
* @return 0 if the given Long value is null else integer value
*/
public static int toPrimitiveInt(Long value) {
if (value == null) {
return 0;
}
return Math.toIntExact(value);
}
/**
* Converts an object Long to a primitive long.
*
* @param value the Long value
* @return 0 if the given Long value is null else long value
*/
public static long toPrimitiveLong(Long value) {
if (value == null) {
return 0;
}
return value;
}
/**
* Creates an Odata filter string that can be used for filtering list results by tags.
*
* @param tagName the name of the tag. If not provided, all resources will be returned.
* @param tagValue the value of the tag. If not provided, only tag name will be filtered.
* @return the Odata filter to pass into list methods
*/
public static String createOdataFilterForTags(String tagName, String tagValue) {
if (tagName == null) {
return null;
} else if (tagValue == null) {
return String.format("tagname eq '%s'", tagName);
} else {
return String.format("tagname eq '%s' and tagvalue eq '%s'", tagName, tagValue);
}
}
/**
* Gets a Mono of type {@code U}, where U extends {@link Indexable}, that emits only the root
* resource from a given Mono of {@link Indexable}.
*
* @param stream the input Mono of {@link Indexable}
* @param <U> the specialized type of last item in the input stream
* @return a Mono that emits last item
*/
@SuppressWarnings("unchecked")
public static <U extends Indexable> Mono<U> rootResource(Mono<Indexable> stream) {
return stream.map(indexable -> (U) indexable);
}
/**
* Download a file asynchronously.
*
* @param url the URL pointing to the file
* @param httpPipeline the http pipeline
* @return an Observable pointing to the content of the file
*/
public static Mono<byte[]> downloadFileAsync(String url, HttpPipeline httpPipeline) {
FileService service = RestProxy.create(FileService.class, httpPipeline);
try {
return service.download(getHost(url), getPathAndQuery(url))
.flatMap(response -> FluxUtil.collectBytesInByteBufferStream(response.getValue()));
} catch (MalformedURLException ex) {
return Mono.error(() -> ex);
}
}
/**
* Get host from url.
*
* @param urlString the url string
* @return the host
* @throws MalformedURLException when url is invalid format
*/
public static String getHost(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String protocol = url.getProtocol();
String host = url.getAuthority();
return protocol + ":
}
/**
* Get path from url.
*
* @param urlString the url string
* @return the path
* @throws MalformedURLException when the url is invalid format
*/
/**
* Adds a value to the list if does not already exists.
*
* @param list the list
* @param value value to add if not exists in the list
*/
public static void addToListIfNotExists(List<String> list, String value) {
boolean found = false;
for (String item : list) {
if (item.equalsIgnoreCase(value)) {
found = true;
break;
}
}
if (!found) {
list.add(value);
}
}
/**
* Removes a value from the list.
*
* @param list the list
* @param value value to remove
*/
public static void removeFromList(List<String> list, String value) {
int foundIndex = -1;
int i = 0;
for (String id : list) {
if (id.equalsIgnoreCase(value)) {
foundIndex = i;
break;
}
i++;
}
if (foundIndex != -1) {
list.remove(foundIndex);
}
}
/**
* @param id resource id
* @return resource group id for the resource id provided
*/
public static String resourceGroupId(String id) {
final ResourceId resourceId = ResourceId.fromString(id);
return String.format("/subscriptions/%s/resourceGroups/%s",
resourceId.subscriptionId(),
resourceId.resourceGroupName());
}
/**
* A Retrofit service used to download a file.
*/
@Host("{$host}")
@ServiceInterface(name = "FileService")
private interface FileService {
@Get("{path}")
Mono<SimpleResponse<Flux<ByteBuffer>>> download(
@HostParam("$host") String host, @PathParam(value = "path", encoded = true) String path);
}
/**
* Gets the only subscription as the default one in the tenant if applicable.
*
* @param subscriptions the list of subscriptions
* @throws IllegalStateException when no subscription or more than one subscription found
* @return the only subscription existing in the tenant
*/
public static String defaultSubscription(PagedIterable<Subscription> subscriptions) {
List<Subscription> subscriptionList = new ArrayList<>();
subscriptions.forEach(subscription -> {
subscriptionList.add(subscription);
});
if (subscriptionList.size() == 0) {
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptionList.size() > 1) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.");
subscriptionList.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException(stringBuilder.toString()));
}
return subscriptionList.get(0).subscriptionId();
}
/**
* Generates default scope for oauth2 from the specific request
* @param request a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromRequest(HttpRequest request, AzureEnvironment environment) {
return getDefaultScopeFromUrl(request.getUrl().toString().toLowerCase(Locale.ROOT), environment);
}
/**
* Generates default scope for oauth2 from the specific request
* @param url the url in lower case of a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromUrl(String url, AzureEnvironment environment) {
String resource = environment.getManagementEndpoint();
for (Map.Entry<String, String> endpoint : environment.endpoints().entrySet()) {
if (url.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(AzureEnvironment.Endpoint.KEYVAULT.identifier())) {
resource = String.format("https:
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.GRAPH.identifier())) {
resource = environment.getGraphEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment.getLogAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment.getApplicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment.getDataLakeEndpointResourceId();
break;
}
}
}
return removeTrailingSlash(resource) + "/.default";
}
/**
* Removes the trailing slash of the string.
* @param s the string
* @return the string without trailing slash
*/
public static String removeTrailingSlash(String s) {
if (s == null || s.length() == 0) {
return s;
}
if (s.endsWith("/")) {
return s.substring(0, s.length() - 1);
}
return s;
}
/**
* Get the Azure storage account connection string.
* @param accountName storage account name
* @param accountKey storage account key
* @param environment the Azure environment
* @return the storage account connection string.
*/
public static String getStorageConnectionString(String accountName, String accountKey,
AzureEnvironment environment) {
if (environment == null || environment.getStorageEndpointSuffix() == null) {
environment = AzureEnvironment.AZURE;
}
String suffix = environment.getStorageEndpointSuffix().replaceAll("^\\.*", "");
return String.format("DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s;EndpointSuffix=%s",
accountName, accountKey, suffix);
}
} |
Could use a bit testing (send one to httpbin.org, or use your fiddler). | public static String getPathAndQuery(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String path = url.getPath();
String query = url.getQuery();
if (query != null && !query.isEmpty()) {
path = path + "?" + query;
}
return path;
} | String query = url.getQuery(); | public static String getPathAndQuery(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String path = url.getPath();
String query = url.getQuery();
if (query != null && !query.isEmpty()) {
path = path + "?" + query;
}
return path;
} | class Utils {
private Utils() {
}
/**
* Converts an object Boolean to a primitive boolean.
*
* @param value the Boolean value
* @return false if the given Boolean value is null or false else true
*/
public static boolean toPrimitiveBoolean(Boolean value) {
if (value == null) {
return false;
}
return value.booleanValue();
}
/**
* Converts an object Integer to a primitive int.
*
* @param value the Integer value
* @return 0 if the given Integer value is null else integer value
*/
public static int toPrimitiveInt(Integer value) {
if (value == null) {
return 0;
}
return value.intValue();
}
/**
* Converts an object Long to a primitive int.
*
* @param value the Long value
* @return 0 if the given Long value is null else integer value
*/
public static int toPrimitiveInt(Long value) {
if (value == null) {
return 0;
}
return Math.toIntExact(value);
}
/**
* Converts an object Long to a primitive long.
*
* @param value the Long value
* @return 0 if the given Long value is null else long value
*/
public static long toPrimitiveLong(Long value) {
if (value == null) {
return 0;
}
return value;
}
/**
* Creates an Odata filter string that can be used for filtering list results by tags.
*
* @param tagName the name of the tag. If not provided, all resources will be returned.
* @param tagValue the value of the tag. If not provided, only tag name will be filtered.
* @return the Odata filter to pass into list methods
*/
public static String createOdataFilterForTags(String tagName, String tagValue) {
if (tagName == null) {
return null;
} else if (tagValue == null) {
return String.format("tagname eq '%s'", tagName);
} else {
return String.format("tagname eq '%s' and tagvalue eq '%s'", tagName, tagValue);
}
}
/**
* Gets a Mono of type {@code U}, where U extends {@link Indexable}, that emits only the root
* resource from a given Mono of {@link Indexable}.
*
* @param stream the input Mono of {@link Indexable}
* @param <U> the specialized type of last item in the input stream
* @return a Mono that emits last item
*/
@SuppressWarnings("unchecked")
public static <U extends Indexable> Mono<U> rootResource(Mono<Indexable> stream) {
return stream.map(indexable -> (U) indexable);
}
/**
* Download a file asynchronously.
*
* @param url the URL pointing to the file
* @param httpPipeline the http pipeline
* @return an Observable pointing to the content of the file
*/
public static Mono<byte[]> downloadFileAsync(String url, HttpPipeline httpPipeline) {
FileService service = RestProxy.create(FileService.class, httpPipeline);
try {
return service.download(getHost(url), getPathAndQuery(url))
.flatMap(response -> FluxUtil.collectBytesInByteBufferStream(response.getValue()));
} catch (MalformedURLException ex) {
return Mono.empty();
}
}
/**
* Get host from url.
*
* @param urlString the url string
* @return the host
* @throws MalformedURLException when url is invalid format
*/
public static String getHost(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String protocol = url.getProtocol();
String host = url.getAuthority();
return protocol + ":
}
/**
* Get path from url.
*
* @param urlString the url string
* @return the path
* @throws MalformedURLException when the url is invalid format
*/
/**
* Adds a value to the list if does not already exists.
*
* @param list the list
* @param value value to add if not exists in the list
*/
public static void addToListIfNotExists(List<String> list, String value) {
boolean found = false;
for (String item : list) {
if (item.equalsIgnoreCase(value)) {
found = true;
break;
}
}
if (!found) {
list.add(value);
}
}
/**
* Removes a value from the list.
*
* @param list the list
* @param value value to remove
*/
public static void removeFromList(List<String> list, String value) {
int foundIndex = -1;
int i = 0;
for (String id : list) {
if (id.equalsIgnoreCase(value)) {
foundIndex = i;
break;
}
i++;
}
if (foundIndex != -1) {
list.remove(foundIndex);
}
}
/**
* @param id resource id
* @return resource group id for the resource id provided
*/
public static String resourceGroupId(String id) {
final ResourceId resourceId = ResourceId.fromString(id);
return String.format("/subscriptions/%s/resourceGroups/%s",
resourceId.subscriptionId(),
resourceId.resourceGroupName());
}
/**
* A Retrofit service used to download a file.
*/
@Host("{$host}")
@ServiceInterface(name = "FileService")
private interface FileService {
@Get("{path}")
Mono<SimpleResponse<Flux<ByteBuffer>>> download(
@HostParam("$host") String host, @PathParam(value = "path", encoded = true) String path);
}
/**
* Gets the only subscription as the default one in the tenant if applicable.
*
* @param subscriptions the list of subscriptions
* @throws IllegalStateException when no subscription or more than one subscription found
* @return the only subscription existing in the tenant
*/
public static String defaultSubscription(PagedIterable<Subscription> subscriptions) {
List<Subscription> subscriptionList = new ArrayList<>();
subscriptions.forEach(subscription -> {
subscriptionList.add(subscription);
});
if (subscriptionList.size() == 0) {
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptionList.size() > 1) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.");
subscriptionList.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException(stringBuilder.toString()));
}
return subscriptionList.get(0).subscriptionId();
}
/**
* Generates default scope for oauth2 from the specific request
* @param request a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromRequest(HttpRequest request, AzureEnvironment environment) {
return getDefaultScopeFromUrl(request.getUrl().toString().toLowerCase(Locale.ROOT), environment);
}
/**
* Generates default scope for oauth2 from the specific request
* @param url the url in lower case of a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromUrl(String url, AzureEnvironment environment) {
String resource = environment.getManagementEndpoint();
for (Map.Entry<String, String> endpoint : environment.endpoints().entrySet()) {
if (url.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(AzureEnvironment.Endpoint.KEYVAULT.identifier())) {
resource = String.format("https:
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.GRAPH.identifier())) {
resource = environment.getGraphEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment.getLogAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment.getApplicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment.getDataLakeEndpointResourceId();
break;
}
}
}
return removeTrailingSlash(resource) + "/.default";
}
/**
* Removes the trailing slash of the string.
* @param s the string
* @return the string without trailing slash
*/
public static String removeTrailingSlash(String s) {
if (s == null || s.length() == 0) {
return s;
}
if (s.endsWith("/")) {
return s.substring(0, s.length() - 1);
}
return s;
}
/**
* Get the Azure storage account connection string.
* @param accountName storage account name
* @param accountKey storage account key
* @param environment the Azure environment
* @return the storage account connection string.
*/
public static String getStorageConnectionString(String accountName, String accountKey,
AzureEnvironment environment) {
if (environment == null || environment.getStorageEndpointSuffix() == null) {
environment = AzureEnvironment.AZURE;
}
String suffix = environment.getStorageEndpointSuffix().replaceAll("^\\.*", "");
return String.format("DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s;EndpointSuffix=%s",
accountName, accountKey, suffix);
}
} | class Utils {
private Utils() {
}
/**
* Converts an object Boolean to a primitive boolean.
*
* @param value the Boolean value
* @return false if the given Boolean value is null or false else true
*/
public static boolean toPrimitiveBoolean(Boolean value) {
if (value == null) {
return false;
}
return value.booleanValue();
}
/**
* Converts an object Integer to a primitive int.
*
* @param value the Integer value
* @return 0 if the given Integer value is null else integer value
*/
public static int toPrimitiveInt(Integer value) {
if (value == null) {
return 0;
}
return value.intValue();
}
/**
* Converts an object Long to a primitive int.
*
* @param value the Long value
* @return 0 if the given Long value is null else integer value
*/
public static int toPrimitiveInt(Long value) {
if (value == null) {
return 0;
}
return Math.toIntExact(value);
}
/**
* Converts an object Long to a primitive long.
*
* @param value the Long value
* @return 0 if the given Long value is null else long value
*/
public static long toPrimitiveLong(Long value) {
if (value == null) {
return 0;
}
return value;
}
/**
* Creates an Odata filter string that can be used for filtering list results by tags.
*
* @param tagName the name of the tag. If not provided, all resources will be returned.
* @param tagValue the value of the tag. If not provided, only tag name will be filtered.
* @return the Odata filter to pass into list methods
*/
public static String createOdataFilterForTags(String tagName, String tagValue) {
if (tagName == null) {
return null;
} else if (tagValue == null) {
return String.format("tagname eq '%s'", tagName);
} else {
return String.format("tagname eq '%s' and tagvalue eq '%s'", tagName, tagValue);
}
}
/**
* Gets a Mono of type {@code U}, where U extends {@link Indexable}, that emits only the root
* resource from a given Mono of {@link Indexable}.
*
* @param stream the input Mono of {@link Indexable}
* @param <U> the specialized type of last item in the input stream
* @return a Mono that emits last item
*/
@SuppressWarnings("unchecked")
public static <U extends Indexable> Mono<U> rootResource(Mono<Indexable> stream) {
return stream.map(indexable -> (U) indexable);
}
/**
* Download a file asynchronously.
*
* @param url the URL pointing to the file
* @param httpPipeline the http pipeline
* @return an Observable pointing to the content of the file
*/
public static Mono<byte[]> downloadFileAsync(String url, HttpPipeline httpPipeline) {
FileService service = RestProxy.create(FileService.class, httpPipeline);
try {
return service.download(getHost(url), getPathAndQuery(url))
.flatMap(response -> FluxUtil.collectBytesInByteBufferStream(response.getValue()));
} catch (MalformedURLException ex) {
return Mono.error(() -> ex);
}
}
/**
* Get host from url.
*
* @param urlString the url string
* @return the host
* @throws MalformedURLException when url is invalid format
*/
public static String getHost(String urlString) throws MalformedURLException {
URL url = new URL(urlString);
String protocol = url.getProtocol();
String host = url.getAuthority();
return protocol + ":
}
/**
* Get path from url.
*
* @param urlString the url string
* @return the path
* @throws MalformedURLException when the url is invalid format
*/
/**
* Adds a value to the list if does not already exists.
*
* @param list the list
* @param value value to add if not exists in the list
*/
public static void addToListIfNotExists(List<String> list, String value) {
boolean found = false;
for (String item : list) {
if (item.equalsIgnoreCase(value)) {
found = true;
break;
}
}
if (!found) {
list.add(value);
}
}
/**
* Removes a value from the list.
*
* @param list the list
* @param value value to remove
*/
public static void removeFromList(List<String> list, String value) {
int foundIndex = -1;
int i = 0;
for (String id : list) {
if (id.equalsIgnoreCase(value)) {
foundIndex = i;
break;
}
i++;
}
if (foundIndex != -1) {
list.remove(foundIndex);
}
}
/**
* @param id resource id
* @return resource group id for the resource id provided
*/
public static String resourceGroupId(String id) {
final ResourceId resourceId = ResourceId.fromString(id);
return String.format("/subscriptions/%s/resourceGroups/%s",
resourceId.subscriptionId(),
resourceId.resourceGroupName());
}
/**
* A Retrofit service used to download a file.
*/
@Host("{$host}")
@ServiceInterface(name = "FileService")
private interface FileService {
@Get("{path}")
Mono<SimpleResponse<Flux<ByteBuffer>>> download(
@HostParam("$host") String host, @PathParam(value = "path", encoded = true) String path);
}
/**
* Gets the only subscription as the default one in the tenant if applicable.
*
* @param subscriptions the list of subscriptions
* @throws IllegalStateException when no subscription or more than one subscription found
* @return the only subscription existing in the tenant
*/
public static String defaultSubscription(PagedIterable<Subscription> subscriptions) {
List<Subscription> subscriptionList = new ArrayList<>();
subscriptions.forEach(subscription -> {
subscriptionList.add(subscription);
});
if (subscriptionList.size() == 0) {
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptionList.size() > 1) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.");
subscriptionList.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw new ClientLogger(Utils.class).logExceptionAsError(
new IllegalStateException(stringBuilder.toString()));
}
return subscriptionList.get(0).subscriptionId();
}
/**
* Generates default scope for oauth2 from the specific request
* @param request a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromRequest(HttpRequest request, AzureEnvironment environment) {
return getDefaultScopeFromUrl(request.getUrl().toString().toLowerCase(Locale.ROOT), environment);
}
/**
* Generates default scope for oauth2 from the specific request
* @param url the url in lower case of a http request
* @param environment the azure environment with current request
* @return the default scope
*/
public static String getDefaultScopeFromUrl(String url, AzureEnvironment environment) {
String resource = environment.getManagementEndpoint();
for (Map.Entry<String, String> endpoint : environment.endpoints().entrySet()) {
if (url.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(AzureEnvironment.Endpoint.KEYVAULT.identifier())) {
resource = String.format("https:
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.GRAPH.identifier())) {
resource = environment.getGraphEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment.getLogAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment.getApplicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(AzureEnvironment.Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment.getDataLakeEndpointResourceId();
break;
}
}
}
return removeTrailingSlash(resource) + "/.default";
}
/**
* Removes the trailing slash of the string.
* @param s the string
* @return the string without trailing slash
*/
public static String removeTrailingSlash(String s) {
if (s == null || s.length() == 0) {
return s;
}
if (s.endsWith("/")) {
return s.substring(0, s.length() - 1);
}
return s;
}
/**
* Get the Azure storage account connection string.
* @param accountName storage account name
* @param accountKey storage account key
* @param environment the Azure environment
* @return the storage account connection string.
*/
public static String getStorageConnectionString(String accountName, String accountKey,
AzureEnvironment environment) {
if (environment == null || environment.getStorageEndpointSuffix() == null) {
environment = AzureEnvironment.AZURE;
}
String suffix = environment.getStorageEndpointSuffix().replaceAll("^\\.*", "");
return String.format("DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s;EndpointSuffix=%s",
accountName, accountKey, suffix);
}
} |
I think this is in-correct. Don't we want to use `getIdleTcpConnectionTimeout` ? | public Builder(ConnectionPolicy connectionPolicy) {
this.bufferPageSize = DEFAULT_OPTIONS.bufferPageSize;
this.connectionAcquisitionTimeout = DEFAULT_OPTIONS.connectionAcquisitionTimeout;
this.connectTimeout = connectionPolicy.getConnectTimeout();
this.idleChannelTimeout = connectionPolicy.getIdleConnectionTimeout();
this.idleEndpointTimeout = DEFAULT_OPTIONS.idleEndpointTimeout;
this.maxBufferCapacity = DEFAULT_OPTIONS.maxBufferCapacity;
this.maxChannelsPerEndpoint = connectionPolicy.getMaxConnectionsPerEndpoint();
this.maxRequestsPerChannel = connectionPolicy.getMaxRequestsPerConnection();
this.receiveHangDetectionTime = DEFAULT_OPTIONS.receiveHangDetectionTime;
this.requestExpiryInterval = DEFAULT_OPTIONS.requestExpiryInterval;
this.requestTimeout = connectionPolicy.getRequestTimeout();
this.requestTimerResolution = DEFAULT_OPTIONS.requestTimerResolution;
this.sendHangDetectionTime = DEFAULT_OPTIONS.sendHangDetectionTime;
this.shutdownTimeout = DEFAULT_OPTIONS.shutdownTimeout;
this.threadCount = DEFAULT_OPTIONS.threadCount;
this.userAgent = DEFAULT_OPTIONS.userAgent;
} | this.idleChannelTimeout = connectionPolicy.getIdleConnectionTimeout(); | public Builder(ConnectionPolicy connectionPolicy) {
this.bufferPageSize = DEFAULT_OPTIONS.bufferPageSize;
this.connectionAcquisitionTimeout = DEFAULT_OPTIONS.connectionAcquisitionTimeout;
this.connectTimeout = connectionPolicy.getConnectTimeout();
this.idleChannelTimeout = connectionPolicy.getIdleTcpConnectionTimeout();
this.idleEndpointTimeout = DEFAULT_OPTIONS.idleEndpointTimeout;
this.maxBufferCapacity = DEFAULT_OPTIONS.maxBufferCapacity;
this.maxChannelsPerEndpoint = connectionPolicy.getMaxConnectionsPerEndpoint();
this.maxRequestsPerChannel = connectionPolicy.getMaxRequestsPerConnection();
this.receiveHangDetectionTime = DEFAULT_OPTIONS.receiveHangDetectionTime;
this.requestExpiryInterval = DEFAULT_OPTIONS.requestExpiryInterval;
this.requestTimeout = connectionPolicy.getRequestTimeout();
this.requestTimerResolution = DEFAULT_OPTIONS.requestTimerResolution;
this.sendHangDetectionTime = DEFAULT_OPTIONS.sendHangDetectionTime;
this.shutdownTimeout = DEFAULT_OPTIONS.shutdownTimeout;
this.threadCount = DEFAULT_OPTIONS.threadCount;
this.userAgent = DEFAULT_OPTIONS.userAgent;
} | class Builder {
private static final String DEFAULT_OPTIONS_PROPERTY_NAME = "azure.cosmos.directTcp.defaultOptions";
private static final Options DEFAULT_OPTIONS;
static {
Options options = null;
try {
final String string = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME);
if (string != null) {
try {
options = RntbdObjectMapper.readValue(string, Options.class);
} catch (IOException error) {
logger.error("failed to parse default Direct TCP options {} due to ", string, error);
}
}
if (options == null) {
final String path = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME + "File");
if (path != null) {
try {
options = RntbdObjectMapper.readValue(new File(path), Options.class);
} catch (IOException error) {
logger.error("failed to load default Direct TCP options from {} due to ", path, error);
}
}
}
if (options == null) {
final ClassLoader loader = RntbdTransportClient.class.getClassLoader();
final String name = DEFAULT_OPTIONS_PROPERTY_NAME + ".json";
try (InputStream stream = loader.getResourceAsStream(name)) {
if (stream != null) {
options = RntbdObjectMapper.readValue(stream, Options.class);
}
} catch (IOException error) {
logger.error("failed to load Direct TCP options from resource {} due to ", name, error);
}
}
} finally {
if (options == null) {
DEFAULT_OPTIONS = new Options(ConnectionPolicy.getDefaultPolicy());
} else {
logger.info("Updated default Direct TCP options from system property {}: {}",
DEFAULT_OPTIONS_PROPERTY_NAME,
options);
DEFAULT_OPTIONS = options;
}
}
}
private int bufferPageSize;
private Duration connectionAcquisitionTimeout;
private Duration connectTimeout;
private Duration idleChannelTimeout;
private Duration idleEndpointTimeout;
private int maxBufferCapacity;
private int maxChannelsPerEndpoint;
private int maxRequestsPerChannel;
private Duration receiveHangDetectionTime;
private Duration requestExpiryInterval;
private Duration requestTimeout;
private Duration requestTimerResolution;
private Duration sendHangDetectionTime;
private Duration shutdownTimeout;
private int threadCount;
private UserAgentContainer userAgent;
public Builder bufferPageSize(final int value) {
checkArgument(value >= 4096 && (value & (value - 1)) == 0,
"expected value to be a power of 2 >= 4096, not %s",
value);
this.bufferPageSize = value;
return this;
}
public Options build() {
checkState(this.bufferPageSize <= this.maxBufferCapacity,
"expected bufferPageSize (%s) <= maxBufferCapacity (%s)",
this.bufferPageSize,
this.maxBufferCapacity);
return new Options(this);
}
public Builder connectionAcquisitionTimeout(final Duration value) {
checkNotNull(value, "expected non-null value");
this.connectTimeout = value.compareTo(Duration.ZERO) < 0 ? Duration.ZERO : value;
return this;
}
public Builder connectionTimeout(final Duration value) {
checkArgument(value == null || value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.connectTimeout = value;
return this;
}
public Builder idleChannelTimeout(final Duration value) {
checkNotNull(value, "expected non-null value");
this.idleChannelTimeout = value;
return this;
}
public Builder idleEndpointTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.idleEndpointTimeout = value;
return this;
}
public Builder maxBufferCapacity(final int value) {
checkArgument(value > 0 && (value & (value - 1)) == 0,
"expected positive value, not %s",
value);
this.maxBufferCapacity = value;
return this;
}
public Builder maxChannelsPerEndpoint(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxChannelsPerEndpoint = value;
return this;
}
public Builder maxRequestsPerChannel(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxRequestsPerChannel = value;
return this;
}
public Builder receiveHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.receiveHangDetectionTime = value;
return this;
}
public Builder requestExpiryInterval(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestExpiryInterval = value;
return this;
}
public Builder requestTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimeout = value;
return this;
}
public Builder requestTimerResolution(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimerResolution = value;
return this;
}
/**
 * Sets the send hang detection time.
 *
 * @param value the detection time; must be non-null and positive.
 * @return this {@link Builder}.
 */
public Builder sendHangDetectionTime(final Duration value) {
    final boolean positive = value != null && !value.isNegative() && !value.isZero();
    checkArgument(positive, "expected positive value, not %s", value);
    this.sendHangDetectionTime = value;
    return this;
}
/**
 * Sets the transport shutdown timeout.
 *
 * @param value the shutdown timeout; must be non-null and positive.
 * @return this {@link Builder}.
 */
public Builder shutdownTimeout(final Duration value) {
    final boolean positive = value != null && !value.isNegative() && !value.isZero();
    checkArgument(positive, "expected positive value, not %s", value);
    this.shutdownTimeout = value;
    return this;
}
/**
 * Sets the transport thread count.
 *
 * @param value the thread count; must be positive.
 * @return this {@link Builder}.
 */
public Builder threadCount(final int value) {
    checkArgument(0 < value, "expected positive value, not %s", value);
    this.threadCount = value;
    return this;
}
/**
 * Sets the user agent container used to stamp outgoing requests.
 *
 * @param value the user agent container; must not be {@code null}.
 * @return this {@link Builder}.
 */
public Builder userAgent(final UserAgentContainer value) {
    checkNotNull(value, "expected non-null value");
    this.userAgent = value;
    return this;
}
} | class Builder {
private static final String DEFAULT_OPTIONS_PROPERTY_NAME = "azure.cosmos.directTcp.defaultOptions";
private static final Options DEFAULT_OPTIONS;
static {
Options options = null;
try {
final String string = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME);
if (string != null) {
try {
options = RntbdObjectMapper.readValue(string, Options.class);
} catch (IOException error) {
logger.error("failed to parse default Direct TCP options {} due to ", string, error);
}
}
if (options == null) {
final String path = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME + "File");
if (path != null) {
try {
options = RntbdObjectMapper.readValue(new File(path), Options.class);
} catch (IOException error) {
logger.error("failed to load default Direct TCP options from {} due to ", path, error);
}
}
}
if (options == null) {
final ClassLoader loader = RntbdTransportClient.class.getClassLoader();
final String name = DEFAULT_OPTIONS_PROPERTY_NAME + ".json";
try (InputStream stream = loader.getResourceAsStream(name)) {
if (stream != null) {
options = RntbdObjectMapper.readValue(stream, Options.class);
}
} catch (IOException error) {
logger.error("failed to load Direct TCP options from resource {} due to ", name, error);
}
}
} finally {
if (options == null) {
DEFAULT_OPTIONS = new Options(ConnectionPolicy.getDefaultPolicy());
} else {
logger.info("Updated default Direct TCP options from system property {}: {}",
DEFAULT_OPTIONS_PROPERTY_NAME,
options);
DEFAULT_OPTIONS = options;
}
}
}
private int bufferPageSize;
private Duration connectionAcquisitionTimeout;
private Duration connectTimeout;
private Duration idleChannelTimeout;
private Duration idleEndpointTimeout;
private int maxBufferCapacity;
private int maxChannelsPerEndpoint;
private int maxRequestsPerChannel;
private Duration receiveHangDetectionTime;
private Duration requestExpiryInterval;
private Duration requestTimeout;
private Duration requestTimerResolution;
private Duration sendHangDetectionTime;
private Duration shutdownTimeout;
private int threadCount;
private UserAgentContainer userAgent;
public Builder bufferPageSize(final int value) {
checkArgument(value >= 4096 && (value & (value - 1)) == 0,
"expected value to be a power of 2 >= 4096, not %s",
value);
this.bufferPageSize = value;
return this;
}
public Options build() {
checkState(this.bufferPageSize <= this.maxBufferCapacity,
"expected bufferPageSize (%s) <= maxBufferCapacity (%s)",
this.bufferPageSize,
this.maxBufferCapacity);
return new Options(this);
}
/**
 * Sets the connection acquisition timeout; negative values are clamped to {@link Duration#ZERO}.
 *
 * @param value the connection acquisition timeout; must not be {@code null}.
 * @return this {@link Builder}.
 */
public Builder connectionAcquisitionTimeout(final Duration value) {
    checkNotNull(value, "expected non-null value");
    // BUG FIX: previously assigned to connectTimeout, silently clobbering the connect timeout
    // and leaving connectionAcquisitionTimeout unset (no other builder method sets it).
    this.connectionAcquisitionTimeout = value.compareTo(Duration.ZERO) < 0 ? Duration.ZERO : value;
    return this;
}
public Builder connectionTimeout(final Duration value) {
checkArgument(value == null || value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.connectTimeout = value;
return this;
}
public Builder idleChannelTimeout(final Duration value) {
checkNotNull(value, "expected non-null value");
this.idleChannelTimeout = value;
return this;
}
public Builder idleEndpointTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.idleEndpointTimeout = value;
return this;
}
public Builder maxBufferCapacity(final int value) {
checkArgument(value > 0 && (value & (value - 1)) == 0,
"expected positive value, not %s",
value);
this.maxBufferCapacity = value;
return this;
}
public Builder maxChannelsPerEndpoint(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxChannelsPerEndpoint = value;
return this;
}
public Builder maxRequestsPerChannel(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxRequestsPerChannel = value;
return this;
}
public Builder receiveHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.receiveHangDetectionTime = value;
return this;
}
public Builder requestExpiryInterval(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestExpiryInterval = value;
return this;
}
public Builder requestTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimeout = value;
return this;
}
public Builder requestTimerResolution(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimerResolution = value;
return this;
}
public Builder sendHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.sendHangDetectionTime = value;
return this;
}
public Builder shutdownTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.shutdownTimeout = value;
return this;
}
public Builder threadCount(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.threadCount = value;
return this;
}
public Builder userAgent(final UserAgentContainer value) {
checkNotNull(value, "expected non-null value");
this.userAgent = value;
return this;
}
} |
I am pretty sure it will not compile, because we no longer have this method. :) | public Builder(ConnectionPolicy connectionPolicy) {
this.bufferPageSize = DEFAULT_OPTIONS.bufferPageSize;
this.connectionAcquisitionTimeout = DEFAULT_OPTIONS.connectionAcquisitionTimeout;
this.connectTimeout = connectionPolicy.getConnectTimeout();
this.idleChannelTimeout = connectionPolicy.getIdleConnectionTimeout();
this.idleEndpointTimeout = DEFAULT_OPTIONS.idleEndpointTimeout;
this.maxBufferCapacity = DEFAULT_OPTIONS.maxBufferCapacity;
this.maxChannelsPerEndpoint = connectionPolicy.getMaxConnectionsPerEndpoint();
this.maxRequestsPerChannel = connectionPolicy.getMaxRequestsPerConnection();
this.receiveHangDetectionTime = DEFAULT_OPTIONS.receiveHangDetectionTime;
this.requestExpiryInterval = DEFAULT_OPTIONS.requestExpiryInterval;
this.requestTimeout = connectionPolicy.getRequestTimeout();
this.requestTimerResolution = DEFAULT_OPTIONS.requestTimerResolution;
this.sendHangDetectionTime = DEFAULT_OPTIONS.sendHangDetectionTime;
this.shutdownTimeout = DEFAULT_OPTIONS.shutdownTimeout;
this.threadCount = DEFAULT_OPTIONS.threadCount;
this.userAgent = DEFAULT_OPTIONS.userAgent;
} | this.idleChannelTimeout = connectionPolicy.getIdleConnectionTimeout(); | public Builder(ConnectionPolicy connectionPolicy) {
this.bufferPageSize = DEFAULT_OPTIONS.bufferPageSize;
this.connectionAcquisitionTimeout = DEFAULT_OPTIONS.connectionAcquisitionTimeout;
this.connectTimeout = connectionPolicy.getConnectTimeout();
this.idleChannelTimeout = connectionPolicy.getIdleTcpConnectionTimeout();
this.idleEndpointTimeout = DEFAULT_OPTIONS.idleEndpointTimeout;
this.maxBufferCapacity = DEFAULT_OPTIONS.maxBufferCapacity;
this.maxChannelsPerEndpoint = connectionPolicy.getMaxConnectionsPerEndpoint();
this.maxRequestsPerChannel = connectionPolicy.getMaxRequestsPerConnection();
this.receiveHangDetectionTime = DEFAULT_OPTIONS.receiveHangDetectionTime;
this.requestExpiryInterval = DEFAULT_OPTIONS.requestExpiryInterval;
this.requestTimeout = connectionPolicy.getRequestTimeout();
this.requestTimerResolution = DEFAULT_OPTIONS.requestTimerResolution;
this.sendHangDetectionTime = DEFAULT_OPTIONS.sendHangDetectionTime;
this.shutdownTimeout = DEFAULT_OPTIONS.shutdownTimeout;
this.threadCount = DEFAULT_OPTIONS.threadCount;
this.userAgent = DEFAULT_OPTIONS.userAgent;
} | class Builder {
private static final String DEFAULT_OPTIONS_PROPERTY_NAME = "azure.cosmos.directTcp.defaultOptions";
private static final Options DEFAULT_OPTIONS;
static {
Options options = null;
try {
final String string = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME);
if (string != null) {
try {
options = RntbdObjectMapper.readValue(string, Options.class);
} catch (IOException error) {
logger.error("failed to parse default Direct TCP options {} due to ", string, error);
}
}
if (options == null) {
final String path = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME + "File");
if (path != null) {
try {
options = RntbdObjectMapper.readValue(new File(path), Options.class);
} catch (IOException error) {
logger.error("failed to load default Direct TCP options from {} due to ", path, error);
}
}
}
if (options == null) {
final ClassLoader loader = RntbdTransportClient.class.getClassLoader();
final String name = DEFAULT_OPTIONS_PROPERTY_NAME + ".json";
try (InputStream stream = loader.getResourceAsStream(name)) {
if (stream != null) {
options = RntbdObjectMapper.readValue(stream, Options.class);
}
} catch (IOException error) {
logger.error("failed to load Direct TCP options from resource {} due to ", name, error);
}
}
} finally {
if (options == null) {
DEFAULT_OPTIONS = new Options(ConnectionPolicy.getDefaultPolicy());
} else {
logger.info("Updated default Direct TCP options from system property {}: {}",
DEFAULT_OPTIONS_PROPERTY_NAME,
options);
DEFAULT_OPTIONS = options;
}
}
}
private int bufferPageSize;
private Duration connectionAcquisitionTimeout;
private Duration connectTimeout;
private Duration idleChannelTimeout;
private Duration idleEndpointTimeout;
private int maxBufferCapacity;
private int maxChannelsPerEndpoint;
private int maxRequestsPerChannel;
private Duration receiveHangDetectionTime;
private Duration requestExpiryInterval;
private Duration requestTimeout;
private Duration requestTimerResolution;
private Duration sendHangDetectionTime;
private Duration shutdownTimeout;
private int threadCount;
private UserAgentContainer userAgent;
public Builder bufferPageSize(final int value) {
checkArgument(value >= 4096 && (value & (value - 1)) == 0,
"expected value to be a power of 2 >= 4096, not %s",
value);
this.bufferPageSize = value;
return this;
}
public Options build() {
checkState(this.bufferPageSize <= this.maxBufferCapacity,
"expected bufferPageSize (%s) <= maxBufferCapacity (%s)",
this.bufferPageSize,
this.maxBufferCapacity);
return new Options(this);
}
/**
 * Sets the connection acquisition timeout; negative values are clamped to {@link Duration#ZERO}.
 *
 * @param value the connection acquisition timeout; must not be {@code null}.
 * @return this {@link Builder}.
 */
public Builder connectionAcquisitionTimeout(final Duration value) {
    checkNotNull(value, "expected non-null value");
    // BUG FIX: previously assigned to connectTimeout, silently clobbering the connect timeout
    // and leaving connectionAcquisitionTimeout unset (no other builder method sets it).
    this.connectionAcquisitionTimeout = value.compareTo(Duration.ZERO) < 0 ? Duration.ZERO : value;
    return this;
}
public Builder connectionTimeout(final Duration value) {
checkArgument(value == null || value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.connectTimeout = value;
return this;
}
public Builder idleChannelTimeout(final Duration value) {
checkNotNull(value, "expected non-null value");
this.idleChannelTimeout = value;
return this;
}
public Builder idleEndpointTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.idleEndpointTimeout = value;
return this;
}
public Builder maxBufferCapacity(final int value) {
checkArgument(value > 0 && (value & (value - 1)) == 0,
"expected positive value, not %s",
value);
this.maxBufferCapacity = value;
return this;
}
public Builder maxChannelsPerEndpoint(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxChannelsPerEndpoint = value;
return this;
}
public Builder maxRequestsPerChannel(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxRequestsPerChannel = value;
return this;
}
public Builder receiveHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.receiveHangDetectionTime = value;
return this;
}
public Builder requestExpiryInterval(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestExpiryInterval = value;
return this;
}
public Builder requestTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimeout = value;
return this;
}
public Builder requestTimerResolution(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimerResolution = value;
return this;
}
public Builder sendHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.sendHangDetectionTime = value;
return this;
}
public Builder shutdownTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.shutdownTimeout = value;
return this;
}
public Builder threadCount(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.threadCount = value;
return this;
}
public Builder userAgent(final UserAgentContainer value) {
checkNotNull(value, "expected non-null value");
this.userAgent = value;
return this;
}
} | class Builder {
private static final String DEFAULT_OPTIONS_PROPERTY_NAME = "azure.cosmos.directTcp.defaultOptions";
private static final Options DEFAULT_OPTIONS;
static {
Options options = null;
try {
final String string = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME);
if (string != null) {
try {
options = RntbdObjectMapper.readValue(string, Options.class);
} catch (IOException error) {
logger.error("failed to parse default Direct TCP options {} due to ", string, error);
}
}
if (options == null) {
final String path = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME + "File");
if (path != null) {
try {
options = RntbdObjectMapper.readValue(new File(path), Options.class);
} catch (IOException error) {
logger.error("failed to load default Direct TCP options from {} due to ", path, error);
}
}
}
if (options == null) {
final ClassLoader loader = RntbdTransportClient.class.getClassLoader();
final String name = DEFAULT_OPTIONS_PROPERTY_NAME + ".json";
try (InputStream stream = loader.getResourceAsStream(name)) {
if (stream != null) {
options = RntbdObjectMapper.readValue(stream, Options.class);
}
} catch (IOException error) {
logger.error("failed to load Direct TCP options from resource {} due to ", name, error);
}
}
} finally {
if (options == null) {
DEFAULT_OPTIONS = new Options(ConnectionPolicy.getDefaultPolicy());
} else {
logger.info("Updated default Direct TCP options from system property {}: {}",
DEFAULT_OPTIONS_PROPERTY_NAME,
options);
DEFAULT_OPTIONS = options;
}
}
}
private int bufferPageSize;
private Duration connectionAcquisitionTimeout;
private Duration connectTimeout;
private Duration idleChannelTimeout;
private Duration idleEndpointTimeout;
private int maxBufferCapacity;
private int maxChannelsPerEndpoint;
private int maxRequestsPerChannel;
private Duration receiveHangDetectionTime;
private Duration requestExpiryInterval;
private Duration requestTimeout;
private Duration requestTimerResolution;
private Duration sendHangDetectionTime;
private Duration shutdownTimeout;
private int threadCount;
private UserAgentContainer userAgent;
public Builder bufferPageSize(final int value) {
checkArgument(value >= 4096 && (value & (value - 1)) == 0,
"expected value to be a power of 2 >= 4096, not %s",
value);
this.bufferPageSize = value;
return this;
}
public Options build() {
checkState(this.bufferPageSize <= this.maxBufferCapacity,
"expected bufferPageSize (%s) <= maxBufferCapacity (%s)",
this.bufferPageSize,
this.maxBufferCapacity);
return new Options(this);
}
/**
 * Sets the connection acquisition timeout; negative values are clamped to {@link Duration#ZERO}.
 *
 * @param value the connection acquisition timeout; must not be {@code null}.
 * @return this {@link Builder}.
 */
public Builder connectionAcquisitionTimeout(final Duration value) {
    checkNotNull(value, "expected non-null value");
    // BUG FIX: previously assigned to connectTimeout, silently clobbering the connect timeout
    // and leaving connectionAcquisitionTimeout unset (no other builder method sets it).
    this.connectionAcquisitionTimeout = value.compareTo(Duration.ZERO) < 0 ? Duration.ZERO : value;
    return this;
}
public Builder connectionTimeout(final Duration value) {
checkArgument(value == null || value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.connectTimeout = value;
return this;
}
public Builder idleChannelTimeout(final Duration value) {
checkNotNull(value, "expected non-null value");
this.idleChannelTimeout = value;
return this;
}
public Builder idleEndpointTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.idleEndpointTimeout = value;
return this;
}
public Builder maxBufferCapacity(final int value) {
checkArgument(value > 0 && (value & (value - 1)) == 0,
"expected positive value, not %s",
value);
this.maxBufferCapacity = value;
return this;
}
public Builder maxChannelsPerEndpoint(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxChannelsPerEndpoint = value;
return this;
}
public Builder maxRequestsPerChannel(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxRequestsPerChannel = value;
return this;
}
public Builder receiveHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.receiveHangDetectionTime = value;
return this;
}
public Builder requestExpiryInterval(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestExpiryInterval = value;
return this;
}
public Builder requestTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimeout = value;
return this;
}
public Builder requestTimerResolution(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimerResolution = value;
return this;
}
public Builder sendHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.sendHangDetectionTime = value;
return this;
}
public Builder shutdownTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.shutdownTimeout = value;
return this;
}
public Builder threadCount(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.threadCount = value;
return this;
}
public Builder userAgent(final UserAgentContainer value) {
checkNotNull(value, "expected non-null value");
this.userAgent = value;
return this;
}
} |
Sure we can, but we should not touch public surface implementations here, as this PR should only target the tracing APIs. I have created a work item for this refactoring: https://github.com/Azure/azure-sdk-for-java/issues/13031 | public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
} | return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context)); | public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
/**
 * Gets the underlying {@link AsyncDocumentClient} wrapped by this client.
 *
 * @return the wrapped async document client.
 */
AsyncDocumentClient getContextClient() {
    return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefore a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
/**
 * Creates a database.
 * <p>
 * After subscription the operation will be performed.
 * The {@link Mono} upon successful completion will contain a single resource response with the
 * created database.
 * In case of failure the {@link Mono} will error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @param options {@link CosmosDatabaseRequestOptions}; may be {@code null}, in which case defaults are used.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   ThroughputProperties throughputProperties,
                                                   CosmosDatabaseRequestOptions options) {
    // Consistency fix: mirror the sibling overload — use a final local instead of
    // reassigning the parameter, which also makes the lambda capture explicit.
    final CosmosDatabaseRequestOptions requestOptions =
        options == null ? new CosmosDatabaseRequestOptions() : options;
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @param throughput the throughput for the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, ThroughputProperties.createManualThroughput(throughput));
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) {
return responseMono.onErrorResume(exception -> {
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if(throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
if (context != null) {
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, context);
}
return createDatabase(new CosmosDatabaseProperties(database.getId()),
requestOptions);
}
}
return Mono.error(unwrappedException);
});
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
Context context) {
String spanName = "createDatabase." + database.getId();
Mono<CosmosDatabaseResponse> responseMono = createDatabaseInternal(database, options);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) {
return asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
.map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse))
.single();
}
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final DataEncryptionKeyProvider dataEncryptionKeyProvider;
private final boolean contentResponseOnWriteEnabled;
private static final Tracer TRACER;
static {
ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
Iterator<?> iterator = serviceLoader.iterator();
if (iterator.hasNext()) {
TRACER = serviceLoader.iterator().next();
} else {
TRACER = null;
}
}
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(TRACER);
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
nestedContext).onErrorResume(exception -> {
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if (throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, nestedContext);
}
}
return Mono.error(unwrappedException);
});
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
Context context) {
String spanName = "createDatabase." + database.getId();
Mono<CosmosDatabaseResponse> responseMono = asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
.map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse))
.single();
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
} |
Can this method call the method on L204 | public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
} | return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context)); | public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
 * Creates a database provisioned with the supplied throughput settings.
 * <p>
 * Nothing happens until the returned {@link Mono} is subscribed to; it then emits a
 * single response carrying the created database, or signals an error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
    CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    return createDatabase(databaseProperties, requestOptions);
}
/**
 * Creates a database provisioned with a fixed amount of manual throughput.
 * <p>
 * Nothing happens until the returned {@link Mono} is subscribed to; it then emits a
 * single response carrying the created database, or signals an error.
 *
 * @param id id of the database.
 * @param throughput the manual throughput for the database, in request units.
 * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
 */
Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) {
    CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(requestOptions, ThroughputProperties.createManualThroughput(throughput));
    return createDatabase(new CosmosDatabaseProperties(id), requestOptions);
}
/**
 * Creates a database with the given id and throughput settings.
 *
 * @param id the id.
 * @param throughputProperties the throughputProperties.
 * @return a {@link Mono} emitting the response for the created database, or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
    CosmosDatabaseProperties databaseProperties = new CosmosDatabaseProperties(id);
    CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    return createDatabase(databaseProperties, requestOptions);
}
/**
 * Reads all databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param options {@link CosmosQueryRequestOptions}
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
    return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
        // Attach tracer metadata ("readAllDatabases" span name, service endpoint) to the paged flux.
        pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
        // Carry the caller's continuation token and page-size limit into the request options.
        setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
        return getDocClientWrapper().readDatabases(options)
            .map(response ->
                // Convert the wire-level Database results into public CosmosDatabaseProperties pages.
                BridgeInternal.createFeedResponse(
                    ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
                    response.getResponseHeaders()));
    });
}
/**
 * Reads all databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
    // Public entry point; delegates with default query options.
    return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
 * Query for databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param query the query.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
    // Wrap the raw query text in a SqlQuerySpec and reuse the shared implementation.
    return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
 * Query for databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param querySpec the SQL query specification.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
    return queryDatabasesInternal(querySpec, options);
}
/**
 * Gets a database object without making a service call.
 *
 * @param id name of the database.
 * @return {@link CosmosAsyncDatabase}.
 */
public CosmosAsyncDatabase getDatabase(String id) {
    // Purely client-side construction; existence of the database is not validated here.
    return new CosmosAsyncDatabase(id, this);
}
/**
 * Close this {@link CosmosAsyncClient} instance and cleans up the resources.
 */
@Override
public void close() {
    asyncDocumentClient.close();
}
// Package-visible accessor for the tracer used by this client's diagnostic spans.
TracerProvider getTracerProvider(){
    return this.tracerProvider;
}
// Shared implementation behind both public queryDatabases overloads: wires tracing and
// continuation state into the paged flux, then maps wire-level results to public types.
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
    return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
        pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
        setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
        return getDocClientWrapper()
            .queryDatabases(querySpec, options)
            .map(feedResponse -> BridgeInternal.createFeedResponse(
                ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(feedResponse.getResults()),
                feedResponse.getResponseHeaders()));
    });
}
// Traces a createDatabaseIfNotExists call: attempts a read first and only creates on NOT_FOUND.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
                                                                       ThroughputProperties throughputProperties, Context context) {
    String spanName = "createDatabaseIfNotExists." + database.getId();
    // Mark the inner read/create as a nested call so the tracer doesn't open a second span for them
    // (see TracerProvider.COSMOS_CALL_DEPTH).
    Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
    Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext);
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
// Converts a NOT_FOUND error from the initial read into a create; every other error is propagated.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) {
    return responseMono.onErrorResume(exception -> {
        // Reactor may wrap the original exception; unwrap it before inspecting the type.
        final Throwable unwrappedException = Exceptions.unwrap(exception);
        if (unwrappedException instanceof CosmosException) {
            final CosmosException cosmosException = (CosmosException) unwrappedException;
            if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
                if(throughputProperties != null) {
                    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
                }
                if (context != null) {
                    Database wrappedDatabase = new Database();
                    wrappedDatabase.setId(database.getId());
                    // Reuse the caller's (nested) context so the create is traced under the same span.
                    return createDatabaseInternal(wrappedDatabase,
                        requestOptions, context);
                }
                // No context available: fall back to the public overload, which starts its own span.
                return createDatabase(new CosmosDatabaseProperties(database.getId()),
                    requestOptions);
            }
        }
        return Mono.error(unwrappedException);
    });
}
// Wraps the create call in a tracing span named after the database id.
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    String spanName = "createDatabase." + database.getId();
    Mono<CosmosDatabaseResponse> responseMono = createDatabaseInternal(database, options);
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
// Issues the actual service call and maps the wire-level response to the public response type.
// .single() enforces exactly one emitted item.
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) {
    return asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        .map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse))
        .single();
}
} | class CosmosAsyncClient implements Closeable {
// Immutable per-client configuration captured from the builder at construction time.
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final DataEncryptionKeyProvider dataEncryptionKeyProvider;
private final boolean contentResponseOnWriteEnabled;
// First Tracer implementation discovered on the classpath, or null when none is registered.
private static final Tracer TRACER;
static {
    ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
    // Reuse the single iterator: the original code called serviceLoader.iterator() a second
    // time to fetch the element, needlessly creating (and re-resolving) a fresh iterator.
    Iterator<Tracer> iterator = serviceLoader.iterator();
    TRACER = iterator.hasNext() ? iterator.next() : null;
}
// Package-private constructor: snapshots all builder settings, then assembles the
// underlying AsyncDocumentClient that performs the actual service communication.
CosmosAsyncClient(CosmosClientBuilder builder) {
    this.configs = builder.configs();
    this.serviceEndpoint = builder.getEndpoint();
    this.keyOrResourceToken = builder.getKey();
    this.connectionPolicy = builder.getConnectionPolicy();
    this.desiredConsistencyLevel = builder.getConsistencyLevel();
    this.permissions = builder.getPermissions();
    this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
    this.credential = builder.getCredential();
    this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
    this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider();
    this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
    this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
    // TRACER was resolved once in the static initializer; may be null when no tracer is on the classpath.
    this.tracerProvider = new TracerProvider(TRACER);
    this.asyncDocumentClient = new AsyncDocumentClient.Builder()
        .withServiceEndpoint(this.serviceEndpoint)
        .withMasterKeyOrResourceToken(this.keyOrResourceToken)
        .withConnectionPolicy(this.connectionPolicy)
        .withConsistencyLevel(this.desiredConsistencyLevel)
        .withSessionCapturingOverride(this.sessionCapturingOverride)
        .withConfigs(this.configs)
        .withTokenResolver(this.cosmosAuthorizationTokenResolver)
        .withCredential(this.credential)
        .withTransportClientSharing(this.enableTransportClientSharing)
        .withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider)
        .withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
        .build();
}
// Package-visible access to the underlying document client.
AsyncDocumentClient getContextClient() {
    return this.asyncDocumentClient;
}
/**
 * Monitor Cosmos client performance and resource utilization using the specified meter registry.
 *
 * @param registry meter registry to use for performance monitoring.
 */
static void setMonitorTelemetry(MeterRegistry registry) {
    RntbdMetrics.add(registry);
}
/**
 * Get the service endpoint.
 *
 * @return the service endpoint.
 */
String getServiceEndpoint() {
    return serviceEndpoint;
}
/**
 * Gets the key or resource token.
 *
 * @return get the key or resource token.
 */
String getKeyOrResourceToken() {
    return keyOrResourceToken;
}
/**
 * Get the connection policy.
 *
 * @return {@link ConnectionPolicy}.
 */
ConnectionPolicy getConnectionPolicy() {
    return connectionPolicy;
}
/**
 * Gets the consistency level.
 *
 * @return the {@link ConsistencyLevel}.
 */
ConsistencyLevel getDesiredConsistencyLevel() {
    return desiredConsistencyLevel;
}
/**
 * Gets the permission list.
 *
 * @return the permission list.
 */
List<CosmosPermissionProperties> getPermissions() {
    return permissions;
}
// Same underlying client as getContextClient(); kept for internal-call readability.
AsyncDocumentClient getDocClientWrapper() {
    return asyncDocumentClient;
}
/**
 * Gets the configs.
 *
 * @return the configs.
 */
Configs getConfigs() {
    return configs;
}
/**
 * Gets the token resolver.
 *
 * @return the token resolver.
 */
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
    return cosmosAuthorizationTokenResolver;
}
/**
 * Gets the azure key credential.
 *
 * @return azure key credential.
 */
AzureKeyCredential credential() {
    return credential;
}
/**
 * Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
 * in case of Create, Update and Delete operations on CosmosItem.
 *
 * If set to false (which is by default), this removes the resource from response. It reduces networking
 * and CPU load by not sending the resource back over the network and serializing it
 * on the client.
 *
 * By-default, this is false.
 *
 * @return a boolean indicating whether resource will be included in the response or not.
 */
boolean isContentResponseOnWriteEnabled() {
    return contentResponseOnWriteEnabled;
}
/**
 * Creates the described database on the service unless it already exists.
 * <p>
 * The returned {@link Mono} emits a single response for the created or pre-existing
 * database once subscribed, or signals an error.
 *
 * @param databaseProperties CosmosDatabaseProperties.
 * @return a {@link Mono} containing the cosmos database response with the created or existing database or
 * an error.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
    CosmosAsyncDatabase database = getDatabase(databaseProperties.getId());
    return withContext(context -> createDatabaseIfNotExistsInternal(database, null, context));
}
/**
 * Create a Database if it does not already exist on the service.
 * <p>
 * The {@link Mono} upon successful completion will contain a single cosmos database response with the
 * created or existing database.
 *
 * @param id the id of the database.
 * @return a {@link Mono} containing the cosmos database response with the created or existing database or
 * an error.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
    // Restored: this javadoc previously dangled with no method body beneath it.
    // Implementation matches the overloads above/below: no throughput override.
    return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
 * Create a Database if it does not already exist on the service.
 * <p>
 * The throughputProperties will only be used if the specified database
 * does not exist and therefor a new database will be created with throughputProperties.
 * <p>
 * The {@link Mono} upon successful completion will contain a single cosmos database response with the
 * created or existing database.
 *
 * @param id the id.
 * @param throughputProperties the throughputProperties.
 * @return the mono.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
    return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
        throughputProperties, context));
}
/**
 * Creates a database with the supplied request options.
 * <p>
 * The work is deferred until subscription; the returned {@link Mono} then emits one
 * response carrying the created database, or signals an error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param options {@link CosmosDatabaseRequestOptions}; may be {@code null} for defaults.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   CosmosDatabaseRequestOptions options) {
    final CosmosDatabaseRequestOptions requestOptions;
    if (options == null) {
        requestOptions = new CosmosDatabaseRequestOptions();
    } else {
        requestOptions = options;
    }
    final Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
 * Creates a database.
 * <p>
 * After subscription the operation will be performed.
 * The {@link Mono} upon successful completion will contain a single resource response with the
 * created database.
 * In case of failure the {@link Mono} will error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
    // Convenience overload: default request options.
    return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
 * Creates a database.
 * <p>
 * After subscription the operation will be performed.
 * The {@link Mono} upon successful completion will contain a single resource response with the
 * created database.
 * In case of failure the {@link Mono} will error.
 *
 * @param id id of the database.
 * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
    // Convenience overload: id only, default properties and options.
    return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
 * Creates a database.
 * <p>
 * After subscription the operation will be performed.
 * The {@link Mono} upon successful completion will contain a single resource response with the
 * created database.
 * In case of failure the {@link Mono} will error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @param options {@link CosmosDatabaseRequestOptions}; may be {@code null}, in which case defaults are used.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   ThroughputProperties throughputProperties,
                                                   CosmosDatabaseRequestOptions options) {
    // Do not reassign the parameter; normalize a null options argument into a fresh default
    // instance, matching createDatabase(CosmosDatabaseProperties, CosmosDatabaseRequestOptions).
    final CosmosDatabaseRequestOptions requestOptions =
        options == null ? new CosmosDatabaseRequestOptions() : options;
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
 * Creates a database.
 * <p>
 * After subscription the operation will be performed.
 * The {@link Mono} upon successful completion will contain a single resource response with the
 * created database.
 * In case of failure the {@link Mono} will error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
    // Fold the throughput settings into fresh request options before delegating.
    CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
    return createDatabase(databaseProperties, options);
}
/**
 * Creates a database.
 *
 * @param id the id.
 * @param throughputProperties the throughputProperties.
 * @return the mono.
 */
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
    CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
    return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
 * Reads all databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param options {@link CosmosQueryRequestOptions}
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
    return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
        // Attach tracer metadata ("readAllDatabases" span name, service endpoint) to the paged flux.
        pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
        // Carry the caller's continuation token and page-size limit into the request options.
        setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
        return getDocClientWrapper().readDatabases(options)
            .map(response ->
                // Convert wire-level Database results into public CosmosDatabaseProperties pages.
                BridgeInternal.createFeedResponse(
                    ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
                    response.getResponseHeaders()));
    });
}
/**
 * Reads all databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
    // Public entry point; delegates with default query options.
    return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
 * Query for databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param query the query.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
    // Wrap the raw query text in a SqlQuerySpec and reuse the shared implementation.
    return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
 * Query for databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param querySpec the SQL query specification.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
    return queryDatabasesInternal(querySpec, options);
}
/**
 * Gets a database object without making a service call.
 *
 * @param id name of the database.
 * @return {@link CosmosAsyncDatabase}.
 */
public CosmosAsyncDatabase getDatabase(String id) {
    // Purely client-side construction; existence of the database is not validated here.
    return new CosmosAsyncDatabase(id, this);
}
/**
 * Close this {@link CosmosAsyncClient} instance and cleans up the resources.
 */
@Override
public void close() {
    asyncDocumentClient.close();
}
// Package-visible accessor for the tracer used by this client's diagnostic spans.
TracerProvider getTracerProvider(){
    return this.tracerProvider;
}
// Shared implementation behind both public queryDatabases overloads.
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
    return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
        // Attach tracer metadata and propagate continuation/page-size state before issuing the query.
        pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
        setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
        return getDocClientWrapper().queryDatabases(querySpec, options)
            .map(response -> BridgeInternal.createFeedResponse(
                ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
                response.getResponseHeaders()));
    });
}
// Reads the database first and creates it only when the read fails with NOT_FOUND;
// the whole read-or-create sequence is traced under one span.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
                                                                       ThroughputProperties throughputProperties, Context context) {
    String spanName = "createDatabaseIfNotExists." + database.getId();
    // Mark the inner read/create as nested so the tracer doesn't open a second span for them.
    Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
    Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
        nestedContext).onErrorResume(exception -> {
        // Reactor may wrap the original exception; unwrap before inspecting the type.
        final Throwable unwrappedException = Exceptions.unwrap(exception);
        if (unwrappedException instanceof CosmosException) {
            final CosmosException cosmosException = (CosmosException) unwrappedException;
            if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
                // Throughput is only applied when the database is actually being created.
                if (throughputProperties != null) {
                    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
                }
                Database wrappedDatabase = new Database();
                wrappedDatabase.setId(database.getId());
                return createDatabaseInternal(wrappedDatabase,
                    requestOptions, nestedContext);
            }
        }
        return Mono.error(unwrappedException);
    });
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
// Creates the database on the service and publishes the single mapped result through the
// tracing pipeline under a span named after the database id.
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    final String databaseId = database.getId();
    final String spanName = "createDatabase." + databaseId;
    Mono<CosmosDatabaseResponse> response = asyncDocumentClient
        .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        .map(resourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(resourceResponse))
        .single();
    return tracerProvider.traceEnabledCosmosResponsePublisher(response,
        context,
        spanName,
        databaseId,
        this.serviceEndpoint);
}
} |
I see we don't set the context object with nested-call metadata for this method. Would it be a better approach to always set the key, and let the value indicate whether the call is nested or not? That would be more robust and would ensure future developers don't miss this. | private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
} | pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null); | private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
} | class CosmosAsyncClient implements Closeable {
// Immutable per-client configuration captured from the builder at construction time.
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
// Package-private constructor: snapshots all builder settings, then assembles the
// underlying AsyncDocumentClient that performs the actual service communication.
CosmosAsyncClient(CosmosClientBuilder builder) {
    this.configs = builder.configs();
    this.serviceEndpoint = builder.getEndpoint();
    this.keyOrResourceToken = builder.getKey();
    this.connectionPolicy = builder.getConnectionPolicy();
    this.desiredConsistencyLevel = builder.getConsistencyLevel();
    this.permissions = builder.getPermissions();
    this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
    this.credential = builder.getCredential();
    this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
    this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
    this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
    // Tracer implementations are discovered from the classpath via ServiceLoader.
    this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
    this.asyncDocumentClient = new AsyncDocumentClient.Builder()
        .withServiceEndpoint(this.serviceEndpoint)
        .withMasterKeyOrResourceToken(this.keyOrResourceToken)
        .withConnectionPolicy(this.connectionPolicy)
        .withConsistencyLevel(this.desiredConsistencyLevel)
        .withSessionCapturingOverride(this.sessionCapturingOverride)
        .withConfigs(this.configs)
        .withTokenResolver(this.cosmosAuthorizationTokenResolver)
        .withCredential(this.credential)
        .withTransportClientSharing(this.enableTransportClientSharing)
        .withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
        .build();
}
// Package-visible access to the underlying document client.
AsyncDocumentClient getContextClient() {
    return this.asyncDocumentClient;
}
/**
 * Monitor Cosmos client performance and resource utilization using the specified meter registry.
 *
 * @param registry meter registry to use for performance monitoring.
 */
static void setMonitorTelemetry(MeterRegistry registry) {
    RntbdMetrics.add(registry);
}
/**
 * Get the service endpoint.
 *
 * @return the service endpoint.
 */
String getServiceEndpoint() {
    return serviceEndpoint;
}
/**
 * Gets the key or resource token.
 *
 * @return get the key or resource token.
 */
String getKeyOrResourceToken() {
    return keyOrResourceToken;
}
/**
 * Get the connection policy.
 *
 * @return {@link ConnectionPolicy}.
 */
ConnectionPolicy getConnectionPolicy() {
    return connectionPolicy;
}
/**
 * Gets the consistency level.
 *
 * @return the {@link ConsistencyLevel}.
 */
ConsistencyLevel getDesiredConsistencyLevel() {
    return desiredConsistencyLevel;
}
/**
 * Gets the permission list.
 *
 * @return the permission list.
 */
List<CosmosPermissionProperties> getPermissions() {
    return permissions;
}
// Same underlying client as getContextClient(); kept for internal-call readability.
AsyncDocumentClient getDocClientWrapper() {
    return asyncDocumentClient;
}
/**
 * Gets the configs.
 *
 * @return the configs.
 */
Configs getConfigs() {
    return configs;
}
/**
 * Gets the token resolver.
 *
 * @return the token resolver.
 */
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
    return cosmosAuthorizationTokenResolver;
}
/**
 * Gets the azure key credential.
 *
 * @return azure key credential.
 */
AzureKeyCredential credential() {
    return credential;
}
/**
 * Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
 * in case of Create, Update and Delete operations on CosmosItem.
 *
 * If set to false (which is by default), this removes the resource from response. It reduces networking
 * and CPU load by not sending the resource back over the network and serializing it
 * on the client.
 *
 * By-default, this is false.
 *
 * @return a boolean indicating whether resource will be included in the response or not.
 */
boolean isContentResponseOnWriteEnabled() {
    return contentResponseOnWriteEnabled;
}
/**
 * CREATE a Database if it does not already exist on the service.
 * <p>
 * The {@link Mono} upon successful completion will contain a single cosmos database response with the
 * created or existing database.
 *
 * @param databaseProperties CosmosDatabaseProperties.
 * @return a {@link Mono} containing the cosmos database response with the created or existing database or
 * an error.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
    // No throughput override: pass null so existing throughput (if any) is untouched.
    return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
        null, context));
}
/**
 * Create a Database if it does not already exist on the service.
 * <p>
 * The {@link Mono} upon successful completion will contain a single cosmos database response with the
 * created or existing database.
 *
 * @param id the id of the database.
 * @return a {@link Mono} containing the cosmos database response with the created or existing database or
 * an error.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
    return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
 * Create a Database if it does not already exist on the service.
 * <p>
 * The throughputProperties will only be used if the specified database
 * does not exist and therefor a new database will be created with throughputProperties.
 * <p>
 * The {@link Mono} upon successful completion will contain a single cosmos database response with the
 * created or existing database.
 *
 * @param id the id.
 * @param throughputProperties the throughputProperties.
 * @return the mono.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
    return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
        throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
     * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
     * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
     * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
     * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @param throughput the throughput for the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, ThroughputProperties.createManualThroughput(throughput));
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
     * Close this {@link CosmosAsyncClient} instance and clean up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) {
return responseMono.onErrorResume(exception -> {
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if(throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
if (context != null) {
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, context);
}
return createDatabase(new CosmosDatabaseProperties(database.getId()),
requestOptions);
}
}
return Mono.error(unwrappedException);
});
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
Context context) {
String spanName = "createDatabase." + database.getId();
Mono<CosmosDatabaseResponse> responseMono = createDatabaseInternal(database, options);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) {
return asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
.map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse))
.single();
}
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final DataEncryptionKeyProvider dataEncryptionKeyProvider;
private final boolean contentResponseOnWriteEnabled;
private static final Tracer TRACER;
static {
ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
Iterator<?> iterator = serviceLoader.iterator();
if (iterator.hasNext()) {
TRACER = serviceLoader.iterator().next();
} else {
TRACER = null;
}
}
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(TRACER);
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
     * By default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
     * Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
     * does not exist and therefore a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
     * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
     * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
     * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
     * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
     * Close this {@link CosmosAsyncClient} instance and clean up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
nestedContext).onErrorResume(exception -> {
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if (throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, nestedContext);
}
}
return Mono.error(unwrappedException);
});
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
Context context) {
String spanName = "createDatabase." + database.getId();
Mono<CosmosDatabaseResponse> responseMono = asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
.map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse))
.single();
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
} |
I think this comment is misplaced, please clarify. This is query code and all tracer logic in query is [here](https://github.com/simplynaveen20/azure-sdk-for-java/blob/21c5743709725058254427079d898d2ad810da90/sdk/cosmos/azure-cosmos/src/main/java/com/azure/cosmos/util/CosmosPagedFlux.java#L35) | private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
} | pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null); | private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
/**
* Gets the underlying document client for package-internal collaborators.
*
* @return the wrapped AsyncDocumentClient.
*/
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
/**
* Gets the underlying document client.
*
* @return the wrapped AsyncDocumentClient.
*/
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
// withContext propagates the subscriber context so the internal call can attach tracing spans.
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefore a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
// Null options are tolerated; substitute defaults so the internal call never sees null.
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
// NOTE(review): this mutates the caller-supplied options instance when options != null —
// confirm callers do not reuse the same options object across unrelated requests.
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
// Effectively-final copy required for capture by the lambda below.
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @param throughput the throughput for the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) {
// Package-private convenience overload expressing throughput as fixed manual RU/s.
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, ThroughputProperties.createManualThroughput(throughput));
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
// Attach tracing metadata and carry over continuation token / page size per page request.
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
// Adapt each wire-level feed response to the public CosmosDatabaseProperties page type.
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
// Package-private accessor used by sibling types to share the tracing infrastructure.
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
// Implements "create if not exists" as read-then-create: attempt a read first and,
// on 404, fall through to creation. The whole flow runs under a single tracing span.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
// Mark nested call depth so inner operations do not open their own top-level spans.
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
// Recovery half of read-then-create: if the read failed with 404 NOTFOUND, create the
// database (applying throughput only for the creation path); all other errors propagate.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) {
return responseMono.onErrorResume(exception -> {
// Unwrap reactive composition wrappers to inspect the real service exception.
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if(throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
if (context != null) {
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, context);
}
// No context available: fall back to the public API, which starts its own span.
return createDatabase(new CosmosDatabaseProperties(database.getId()),
requestOptions);
}
}
return Mono.error(unwrappedException);
});
}
/**
* Creates the database under a tracing span named after the database id.
*
* @param database the wire-level database resource to create.
* @param options request options (already defaulted by the callers).
* @param context reactor context used to attach the tracing span.
* @return a {@link Mono} emitting the create response or an error.
*/
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    String spanName = "createDatabase." + database.getId();
    Mono<CosmosDatabaseResponse> responseMono = createDatabaseInternal(database, options);
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}

/**
* Issues the create-database request and adapts the wire response to the public response type.
* {@code single()} asserts exactly one response element is emitted.
*/
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) {
    return asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        // Method reference instead of the redundant lambda wrapper.
        .map(ModelBridgeInternal::createCosmosDatabaseResponse)
        .single();
}
}

class CosmosAsyncClient implements Closeable {
// All instance fields are immutable snapshots of CosmosClientBuilder state taken in the constructor.
private final Configs configs;
// Wrapped document client that performs the actual service requests.
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
// Provider used for client-side encryption keys, as supplied by the builder.
private final DataEncryptionKeyProvider dataEncryptionKeyProvider;
private final boolean contentResponseOnWriteEnabled;
// Tracer implementation discovered once per class load in the static initializer below;
// null when no implementation is present on the classpath.
private static final Tracer TRACER;
static {
    // Discover a Tracer implementation (if any) via the JDK ServiceLoader SPI.
    ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
    Iterator<Tracer> iterator = serviceLoader.iterator();
    // Reuse the iterator we just probed with hasNext(). The original code called
    // serviceLoader.iterator() a second time, creating a brand-new lazy iterator
    // and re-running provider lookup just to fetch the first element.
    TRACER = iterator.hasNext() ? iterator.next() : null;
}
// Package-private: instances are created through CosmosClientBuilder, not constructed directly.
CosmosAsyncClient(CosmosClientBuilder builder) {
// Capture every builder setting into final fields so the client is immutable afterwards.
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
// TRACER was resolved once in the static initializer; may be null if none is on the classpath.
this.tracerProvider = new TracerProvider(TRACER);
// Build the wrapped AsyncDocumentClient last, from the captured settings.
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
/**
* Gets the underlying document client for package-internal collaborators.
*
* @return the wrapped AsyncDocumentClient.
*/
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
/**
* Gets the underlying document client.
*
* @return the wrapped AsyncDocumentClient.
*/
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
// withContext propagates the subscriber context so the internal call can attach tracing spans.
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefore a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
// Null options are tolerated; substitute defaults so the internal call never sees null.
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
// NOTE(review): this mutates the caller-supplied options instance when options != null —
// confirm callers do not reuse the same options object across unrelated requests.
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
// Effectively-final copy required for capture by the lambda below.
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
// Attach tracing metadata and carry over continuation token / page size per page request.
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
// Adapt each wire-level feed response to the public CosmosDatabaseProperties page type.
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
// Package-private accessor used by sibling types to share the tracing infrastructure.
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
// Implements "create if not exists" as read-then-create: attempt a read first and,
// on 404 NOTFOUND, create the database (applying throughput only on the creation path).
// The whole flow runs under a single tracing span named after the database.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
// Mark nested call depth so inner operations do not open their own top-level spans.
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
nestedContext).onErrorResume(exception -> {
// Unwrap reactive composition wrappers to inspect the real service exception.
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if (throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, nestedContext);
}
}
// Any non-404 failure propagates unchanged.
return Mono.error(unwrappedException);
});
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
/**
* Creates the database under a tracing span named after the database id, adapting the
* wire response to the public response type. {@code single()} asserts exactly one element.
*
* @param database the wire-level database resource to create.
* @param options request options (already defaulted by the callers).
* @param context reactor context used to attach the tracing span.
* @return a {@link Mono} emitting the create response or an error.
*/
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    String spanName = "createDatabase." + database.getId();
    Mono<CosmosDatabaseResponse> responseMono =
        asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
            // Method reference instead of the redundant lambda wrapper.
            .map(ModelBridgeInternal::createCosmosDatabaseResponse)
            .single();
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
}
We could do that, but we are avoiding nested calls in the public API. It is also easier to read and debug this way, and our tracing goal is fulfilled with the current design — so if you don't mind, can we keep the current state?

public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}

return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));

public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}

class CosmosAsyncClient implements Closeable {
// All fields are immutable snapshots of CosmosClientBuilder state taken in the constructor.
private final Configs configs;
// Wrapped document client that performs the actual service requests.
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
// Whether Create/Update/Delete item responses carry the full resource payload.
private final boolean contentResponseOnWriteEnabled;
// Package-private constructor used by CosmosClientBuilder: snapshots every builder
// setting into final fields and builds the wrapped AsyncDocumentClient from them,
// so later mutation of the builder cannot affect this client instance.
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
// Tracer implementations are discovered through the JDK ServiceLoader; the loader
// may yield zero implementations — TracerProvider receives whatever is found.
this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
// All captured settings are forwarded to the underlying document client.
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
 * Creates a database provisioned with a fixed amount of manual throughput.
 * <p>
 * The operation is performed upon subscription. On success the {@link Mono} emits a
 * single resource response containing the created database; on failure it errors.
 *
 * @param id id of the database.
 * @param throughput the manual throughput for the database.
 * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
 */
Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) {
    final CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(
        requestOptions, ThroughputProperties.createManualThroughput(throughput));
    return createDatabase(new CosmosDatabaseProperties(id), requestOptions);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
// Shared implementation behind both public queryDatabases overloads: runs the SQL
// spec through the wrapped document client and republishes the results as a
// CosmosPagedFlux, wiring tracing metadata plus continuation-token / page-size
// settings from the paged-flux options.
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
// Re-wrap each V2 feed page as a public feed response of database properties.
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
 * Traced "create if not exists": attempts a read of the database first and delegates
 * the 404-fallback creation to the Mono-based overload.
 *
 * @param database client object naming the target database.
 * @param throughputProperties throughput applied only if the database must be created; may be null.
 * @param context tracing context of the caller.
 * @return a Mono emitting the read or created database response.
 */
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
// Flag nested calls via COSMOS_CALL_DEPTH — presumably suppresses separate top-level
// spans for the inner read/create; confirm against TracerProvider.
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
/**
 * Fallback half of "create if not exists". {@code responseMono} is expected to be the
 * read attempt for the database: a 404 from it triggers creation of the database,
 * while every other error is unwrapped and propagated unchanged.
 *
 * @param responseMono the read attempt for the database.
 * @param database client object naming the database to create on 404.
 * @param throughputProperties throughput applied only when the database is created; may be null.
 * @param context tracing context; when null the public (self-tracing) create path is used instead.
 * @return a Mono emitting the read or created database response.
 */
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) {
return responseMono.onErrorResume(exception -> {
// Reactor may wrap the original failure; unwrap before inspecting it.
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
// Database absent: create it, applying throughput only on this create path.
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if(throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
if (context != null) {
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
// Context supplied: create inside the caller's existing tracing span.
return createDatabaseInternal(wrappedDatabase,
requestOptions, context);
}
// No context: fall back to the public create, which opens its own span.
return createDatabase(new CosmosDatabaseProperties(database.getId()),
requestOptions);
}
}
// Not a "database missing" condition — surface the original failure.
return Mono.error(unwrappedException);
});
}
/**
 * Creates the given database and traces the call.
 * <p>
 * Delegates the service call to the untraced overload and wraps the resulting
 * publisher in a tracing span named after the target database.
 *
 * @param database the wrapped V2 database resource to create.
 * @param options request options for the create call.
 * @param context tracing context of the caller.
 * @return a Mono emitting the create response or an error.
 */
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
    Context context) {
    final String span = "createDatabase." + database.getId();
    final Mono<CosmosDatabaseResponse> createMono = createDatabaseInternal(database, options);
    return tracerProvider.traceEnabledCosmosResponsePublisher(
        createMono, context, span, database.getId(), this.serviceEndpoint);
}
/**
 * Creates the given database without any tracing.
 * <p>
 * Issues the create through the wrapped document client and adapts the V2 resource
 * response into the public {@link CosmosDatabaseResponse} type.
 *
 * @param database the wrapped V2 database resource to create.
 * @param options request options for the create call.
 * @return a Mono emitting exactly one create response or an error.
 */
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) {
    return asyncDocumentClient
        .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        .map(ModelBridgeInternal::createCosmosDatabaseResponse)
        .single();
}
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final DataEncryptionKeyProvider dataEncryptionKeyProvider;
private final boolean contentResponseOnWriteEnabled;
// Tracer implementation discovered once at class load; null when none is registered.
private static final Tracer TRACER;
static {
    ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
    // Bug fix: use one typed iterator for both hasNext() and next(). The previous
    // code called serviceLoader.iterator() a second time and invoked next() on that
    // fresh iterator without checking it, rather than on the iterator it had probed.
    Iterator<Tracer> iterator = serviceLoader.iterator();
    TRACER = iterator.hasNext() ? iterator.next() : null;
}
// Package-private constructor used by CosmosClientBuilder: snapshots every builder
// setting into final fields and builds the wrapped AsyncDocumentClient from them,
// so later mutation of the builder cannot affect this client instance.
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
// TRACER is resolved once per JVM in the static initializer; it may be null when
// no Tracer implementation is on the classpath.
this.tracerProvider = new TracerProvider(TRACER);
// All captured settings are forwarded to the underlying document client.
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
// Read-or-create flow for createDatabaseIfNotExists: try to read the database first and
// only if the read fails with 404 (not found) fall back to creating it. Any other error
// is propagated unchanged. The combined flow is wrapped in a single tracing span.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
    ThroughputProperties throughputProperties, Context context) {
    String spanName = "createDatabaseIfNotExists." + database.getId();
    // Mark the nested read/create calls so they are not traced as separate top-level spans.
    Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
    Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
        nestedContext).onErrorResume(exception -> {
        final Throwable unwrappedException = Exceptions.unwrap(exception);
        if (unwrappedException instanceof CosmosException) {
            final CosmosException cosmosException = (CosmosException) unwrappedException;
            if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                // Database does not exist yet: create it, honouring the optional throughput.
                CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
                if (throughputProperties != null) {
                    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
                }
                Database wrappedDatabase = new Database();
                wrappedDatabase.setId(database.getId());
                return createDatabaseInternal(wrappedDatabase,
                    requestOptions, nestedContext);
            }
        }
        // Not a "not found" condition — surface the original (unwrapped) failure.
        return Mono.error(unwrappedException);
    });
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
    Context context) {
    // Issue the create against the wire-level client, adapt the result to the public
    // response type, and trace the whole call under a span named after the database.
    final String span = "createDatabase." + database.getId();
    final Mono<CosmosDatabaseResponse> create = asyncDocumentClient
        .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        .map(ModelBridgeInternal::createCosmosDatabaseResponse)
        .single();
    return tracerProvider.traceEnabledCosmosResponsePublisher(create, context, span, database.getId(), this.serviceEndpoint);
}
} |
@kushagraThapar Could you clarify if this is a design decision to not reuse the methods here? And if it is, why is it followed in some files and not all the time? More context - https://github.com/Azure/azure-sdk-for-java/pull/12867#discussion_r452564457 | public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
} | return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context)); | public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
// Builds the client from the settings captured by the CosmosClientBuilder, then eagerly
// constructs the underlying AsyncDocumentClient (which owns transport, connection and
// session state). All configuration fields are immutable after construction.
CosmosAsyncClient(CosmosClientBuilder builder) {
    this.configs = builder.configs();
    this.serviceEndpoint = builder.getEndpoint();
    this.keyOrResourceToken = builder.getKey();
    this.connectionPolicy = builder.getConnectionPolicy();
    this.desiredConsistencyLevel = builder.getConsistencyLevel();
    this.permissions = builder.getPermissions();
    this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
    this.credential = builder.getCredential();
    this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
    this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
    this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
    // Discover available Tracer implementations on the classpath via the ServiceLoader SPI.
    this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
    this.asyncDocumentClient = new AsyncDocumentClient.Builder()
        .withServiceEndpoint(this.serviceEndpoint)
        .withMasterKeyOrResourceToken(this.keyOrResourceToken)
        .withConnectionPolicy(this.connectionPolicy)
        .withConsistencyLevel(this.desiredConsistencyLevel)
        .withSessionCapturingOverride(this.sessionCapturingOverride)
        .withConfigs(this.configs)
        .withTokenResolver(this.cosmosAuthorizationTokenResolver)
        .withCredential(this.credential)
        .withTransportClientSharing(this.enableTransportClientSharing)
        .withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
        .build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
    // Run the shared read-or-create flow with no throughput override.
    final String databaseId = databaseProperties.getId();
    return withContext(ctx -> createDatabaseIfNotExistsInternal(getDatabase(databaseId), null, ctx));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefore a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   ThroughputProperties throughputProperties,
                                                   CosmosDatabaseRequestOptions options) {
    // Mirror createDatabase(CosmosDatabaseProperties, CosmosDatabaseRequestOptions): treat a
    // null options argument as defaults via a ternary instead of reassigning the parameter.
    final CosmosDatabaseRequestOptions requestOptions =
        options == null ? new CosmosDatabaseRequestOptions() : options;
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    // Translate the public properties type into the wire-level Database resource.
    Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @param throughput the throughput for the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, ThroughputProperties.createManualThroughput(throughput));
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
// Internal overload: lists every database in the account as a paged flux. On each
// subscription the flux tags tracer metadata, applies the caller's continuation
// token / page size, and maps raw pages into CosmosDatabaseProperties feed responses.
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
    return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
        // Database id is null: reading databases is an account-level operation.
        pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
        setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
        return getDocClientWrapper().readDatabases(options)
            .map(response ->
                BridgeInternal.createFeedResponse(
                    ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
                    response.getResponseHeaders()));
    });
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
// Shared fallback logic for the read-then-create flow: if the upstream read fails with
// 404 (not found), the database is created instead; any other failure is propagated
// unchanged. When a tracing Context is supplied, the create is issued through the traced
// internal path; otherwise (context == null) it falls back to the public createDatabase
// overload, which establishes its own context via withContext.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) {
    return responseMono.onErrorResume(exception -> {
        final Throwable unwrappedException = Exceptions.unwrap(exception);
        if (unwrappedException instanceof CosmosException) {
            final CosmosException cosmosException = (CosmosException) unwrappedException;
            if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                // Database is missing: build create options, honouring the optional throughput.
                CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
                if(throughputProperties != null) {
                    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
                }
                if (context != null) {
                    Database wrappedDatabase = new Database();
                    wrappedDatabase.setId(database.getId());
                    return createDatabaseInternal(wrappedDatabase,
                        requestOptions, context);
                }
                return createDatabase(new CosmosDatabaseProperties(database.getId()),
                    requestOptions);
            }
        }
        // Not a "not found" condition — surface the original (unwrapped) failure.
        return Mono.error(unwrappedException);
    });
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
Context context) {
String spanName = "createDatabase." + database.getId();
Mono<CosmosDatabaseResponse> responseMono = createDatabaseInternal(database, options);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) {
    // Issue the create against the wire-level client and adapt the single expected
    // result to the public response type.
    return asyncDocumentClient
        .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        .map(ModelBridgeInternal::createCosmosDatabaseResponse)
        .single();
}
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final DataEncryptionKeyProvider dataEncryptionKeyProvider;
private final boolean contentResponseOnWriteEnabled;
// Resolved once per class load: the first Tracer implementation discovered on the
// classpath via the ServiceLoader SPI, or null when none is registered.
private static final Tracer TRACER;
static {
    ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
    Iterator<Tracer> iterator = serviceLoader.iterator();
    // Reuse the iterator that was just probed with hasNext(); the previous code called
    // serviceLoader.iterator() a second time, needlessly restarting provider lookup.
    TRACER = iterator.hasNext() ? iterator.next() : null;
}
// Builds the client from the settings captured by the CosmosClientBuilder, then eagerly
// constructs the underlying AsyncDocumentClient (which owns transport, connection and
// session state). All configuration fields are immutable after construction.
CosmosAsyncClient(CosmosClientBuilder builder) {
    this.configs = builder.configs();
    this.serviceEndpoint = builder.getEndpoint();
    this.keyOrResourceToken = builder.getKey();
    this.connectionPolicy = builder.getConnectionPolicy();
    this.desiredConsistencyLevel = builder.getConsistencyLevel();
    this.permissions = builder.getPermissions();
    this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
    this.credential = builder.getCredential();
    this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
    this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider();
    this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
    this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
    // TRACER is resolved once, in the static initializer, via the ServiceLoader SPI.
    this.tracerProvider = new TracerProvider(TRACER);
    this.asyncDocumentClient = new AsyncDocumentClient.Builder()
        .withServiceEndpoint(this.serviceEndpoint)
        .withMasterKeyOrResourceToken(this.keyOrResourceToken)
        .withConnectionPolicy(this.connectionPolicy)
        .withConsistencyLevel(this.desiredConsistencyLevel)
        .withSessionCapturingOverride(this.sessionCapturingOverride)
        .withConfigs(this.configs)
        .withTokenResolver(this.cosmosAuthorizationTokenResolver)
        .withCredential(this.credential)
        .withTransportClientSharing(this.enableTransportClientSharing)
        .withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider)
        .withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
        .build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefore a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
 * Reads all databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
    // Delegates to the package-private overload with default query options.
    return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
 * Query for databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param query the query.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
    // Wrap the raw query text in a SqlQuerySpec and reuse the shared implementation.
    final SqlQuerySpec querySpec = new SqlQuerySpec(query);
    return queryDatabasesInternal(querySpec, options);
}
/**
 * Query for databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param querySpec the SQL query specification.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
    // Delegates to the shared internal implementation used by both query overloads.
    return queryDatabasesInternal(querySpec, options);
}
/**
 * Gets a database object without making a service call.
 *
 * @param id name of the database.
 * @return {@link CosmosAsyncDatabase}.
 */
public CosmosAsyncDatabase getDatabase(String id) {
    // Purely local construction; existence of the database is not verified here.
    return new CosmosAsyncDatabase(id, this);
}
/**
 * Close this {@link CosmosAsyncClient} instance and cleans up the resources.
 */
@Override
public void close() {
    // Shuts down the underlying document client that owns all connections.
    asyncDocumentClient.close();
}
// Package-private accessor for the tracer used to create diagnostics spans.
TracerProvider getTracerProvider(){
    return this.tracerProvider;
}
// Shared implementation backing both the String and SqlQuerySpec query overloads.
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
    return UtilBridgeInternal.createCosmosPagedFlux(fluxOptions -> {
        // Tag the paged flux for tracing and carry paging state over from options.
        fluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
        setContinuationTokenAndMaxItemCount(fluxOptions, options);
        return getDocClientWrapper()
            .queryDatabases(querySpec, options)
            .map(page -> BridgeInternal.createFeedResponse(
                ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(page.getResults()),
                page.getResponseHeaders()));
    });
}
// Read-then-create flow: try to read the database first; only a NOTFOUND error
// triggers creation. The whole composite operation runs under one tracing span.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
                                                                       ThroughputProperties throughputProperties, Context context) {
    final String spanName = "createDatabaseIfNotExists." + database.getId();
    // Nested context marks the inner read/create calls as children of this span.
    final Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
    final Mono<CosmosDatabaseResponse> readOrCreate = database
        .readInternal(new CosmosDatabaseRequestOptions(), nestedContext)
        .onErrorResume(throwable -> {
            final Throwable cause = Exceptions.unwrap(throwable);
            // Anything other than a NOTFOUND CosmosException is propagated as-is.
            if (!(cause instanceof CosmosException)
                || ((CosmosException) cause).getStatusCode() != HttpConstants.StatusCodes.NOTFOUND) {
                return Mono.error(cause);
            }
            final CosmosDatabaseRequestOptions createOptions = new CosmosDatabaseRequestOptions();
            if (throughputProperties != null) {
                ModelBridgeInternal.setThroughputProperties(createOptions, throughputProperties);
            }
            final Database toCreate = new Database();
            toCreate.setId(database.getId());
            return createDatabaseInternal(toCreate, createOptions, nestedContext);
        });
    return tracerProvider.traceEnabledCosmosResponsePublisher(readOrCreate,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
// Issues the create call and wraps it in a tracing span named after the database.
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    final String spanName = "createDatabase." + database.getId();
    final Mono<CosmosDatabaseResponse> response = asyncDocumentClient
        .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        .map(ModelBridgeInternal::createCosmosDatabaseResponse)
        .single();
    return tracerProvider.traceEnabledCosmosResponsePublisher(response,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
} |
We will go by this approach, i.e. adding nested data only on the APIs that need it, as this gave the best perf result; adding the data on all APIs might have a performance impact. | private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
} | pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null); | private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
} | class CosmosAsyncClient implements Closeable {
// Configuration and credentials captured from the builder; all fields are final
// and assigned once in the constructor.
private final Configs configs;
// Low-level client that performs all actual service operations.
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
// Behavioral flags forwarded to the AsyncDocumentClient builder.
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
// Tracer used to emit diagnostics spans around client operations.
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
// Snapshots every builder setting and wires up the underlying document client;
// the resulting instance is immutable.
CosmosAsyncClient(CosmosClientBuilder builder) {
    this.configs = builder.configs();
    this.serviceEndpoint = builder.getEndpoint();
    this.keyOrResourceToken = builder.getKey();
    this.connectionPolicy = builder.getConnectionPolicy();
    this.desiredConsistencyLevel = builder.getConsistencyLevel();
    this.permissions = builder.getPermissions();
    this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
    this.credential = builder.getCredential();
    this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
    this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
    this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
    // Tracer implementations are discovered from the classpath via ServiceLoader.
    this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
    // Build the low-level client with every configured option applied.
    this.asyncDocumentClient = new AsyncDocumentClient.Builder()
        .withServiceEndpoint(this.serviceEndpoint)
        .withMasterKeyOrResourceToken(this.keyOrResourceToken)
        .withConnectionPolicy(this.connectionPolicy)
        .withConsistencyLevel(this.desiredConsistencyLevel)
        .withSessionCapturingOverride(this.sessionCapturingOverride)
        .withConfigs(this.configs)
        .withTokenResolver(this.cosmosAuthorizationTokenResolver)
        .withCredential(this.credential)
        .withTransportClientSharing(this.enableTransportClientSharing)
        .withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
        .build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
 * Creates a database with the given id if it does not already exist on the service.
 * <p>
 * The throughput properties are applied only when the database does not yet exist
 * and therefore has to be created.
 * <p>
 * The {@link Mono} upon successful completion will contain a single cosmos database response with the
 * created or existing database.
 *
 * @param id the id of the database.
 * @param throughputProperties the throughput to provision if the database is created.
 * @return a {@link Mono} containing the cosmos database response with the created or existing database.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
    // getDatabase is a purely local construction, so it is safe to hoist it
    // out of the withContext lambda.
    CosmosAsyncDatabase database = getDatabase(id);
    return withContext(context ->
        createDatabaseIfNotExistsInternal(database, throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
 * Creates a database.
 * <p>
 * After subscription the operation will be performed.
 * The {@link Mono} upon successful completion will contain a single resource response with the
 * created database.
 * In case of failure the {@link Mono} will error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @param options {@link CosmosDatabaseRequestOptions}; may be null, in which case defaults are used.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   ThroughputProperties throughputProperties,
                                                   CosmosDatabaseRequestOptions options) {
    // Normalize null options up front with the same ternary pattern as the
    // two-argument overload, instead of reassigning the parameter and then
    // aliasing it into a final local.
    final CosmosDatabaseRequestOptions requestOptions =
        options == null ? new CosmosDatabaseRequestOptions() : options;
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    // Wrap the public properties into the wire-level Database resource.
    Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @param throughput the throughput for the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, ThroughputProperties.createManualThroughput(throughput));
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
// Runs the read-then-create-if-missing flow for createDatabaseIfNotExists,
// wrapping the whole composite operation in a single tracing span.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
    String spanName = "createDatabaseIfNotExists." + database.getId();
    // Nested context marks the inner read/create calls as children of this span.
    Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
    Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext);
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
    context,
    spanName,
    database.getId(),
    this.serviceEndpoint);
}
// Fallback half of the read-then-create flow: resumes a failed read by creating
// the database, but only when the failure is a NOTFOUND CosmosException.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) {
    return responseMono.onErrorResume(exception -> {
        final Throwable unwrappedException = Exceptions.unwrap(exception);
        if (unwrappedException instanceof CosmosException) {
            final CosmosException cosmosException = (CosmosException) unwrappedException;
            if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
                // Throughput is only provisioned when the database has to be created.
                if(throughputProperties != null) {
                    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
                }
                // NOTE(review): the caller in this file always passes a non-null
                // nested context, so the no-context branch below presumably serves
                // a call site outside this view — confirm before removing it.
                if (context != null) {
                    Database wrappedDatabase = new Database();
                    wrappedDatabase.setId(database.getId());
                    return createDatabaseInternal(wrappedDatabase,
                    requestOptions, context);
                }
                return createDatabase(new CosmosDatabaseProperties(database.getId()),
                requestOptions);
            }
        }
        // Any other failure is propagated unchanged (unwrapped).
        return Mono.error(unwrappedException);
    });
}
// Wraps the non-traced create pipeline in a tracing span named after the database.
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    final String spanName = "createDatabase." + database.getId();
    return tracerProvider.traceEnabledCosmosResponsePublisher(
        createDatabaseInternal(database, options),
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
// Issues the service call and adapts the wire response into the public type.
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) {
    return asyncDocumentClient
        .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        .map(ModelBridgeInternal::createCosmosDatabaseResponse)
        .single();
}
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final DataEncryptionKeyProvider dataEncryptionKeyProvider;
private final boolean contentResponseOnWriteEnabled;
// Tracer implementation resolved once for all client instances; null when no
// Tracer provider is available on the classpath.
private static final Tracer TRACER;
static {
    ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
    Iterator<Tracer> iterator = serviceLoader.iterator();
    // Fix: the original checked hasNext() on one iterator but fetched the
    // provider via a second, freshly created serviceLoader.iterator(), which
    // re-resolves the provider and leaves the checked iterator unused.
    if (iterator.hasNext()) {
        TRACER = iterator.next();
    } else {
        TRACER = null;
    }
}
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(TRACER);
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
 * Creates a database.
 * <p>
 * After subscription the operation will be performed.
 * The {@link Mono} upon successful completion will contain a single resource response with the
 * created database.
 * In case of failure the {@link Mono} will error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @param options {@link CosmosDatabaseRequestOptions}; may be {@code null}, in which case defaults are used.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   ThroughputProperties throughputProperties,
                                                   CosmosDatabaseRequestOptions options) {
    // Normalize null options once into an effectively-final local (required by the lambda below);
    // this matches the null-coalescing style of the (properties, options) overload instead of
    // reassigning the parameter and aliasing it afterwards.
    final CosmosDatabaseRequestOptions requestOptions =
        options == null ? new CosmosDatabaseRequestOptions() : options;
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    // The service API takes the internal Database wrapper, keyed by id only.
    Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
 * Creates a database provisioned with the given throughput.
 * <p>
 * The operation runs only after subscription. On success the {@link Mono} emits a single
 * {@link CosmosDatabaseResponse} for the newly created database; on failure it signals an error.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
    // Fold the throughput into a request-options instance, then reuse the two-arg overload.
    final CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    return createDatabase(databaseProperties, requestOptions);
}
/**
 * Creates a database with the given id, provisioned with the given throughput.
 *
 * @param id the id.
 * @param throughputProperties the throughputProperties.
 * @return the mono.
 */
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
    // Delegate to the (properties, throughputProperties) overload instead of duplicating
    // its options-building logic verbatim; behavior is identical.
    return createDatabase(new CosmosDatabaseProperties(id), throughputProperties);
}
/**
 * Reads all databases in the account.
 * <p>
 * The operation runs only after subscription. The returned {@link CosmosPagedFlux} emits one or
 * more feed-response pages of the read databases, or signals an error on failure.
 *
 * @param options {@link CosmosQueryRequestOptions}
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
    return UtilBridgeInternal.createCosmosPagedFlux(fluxOptions -> {
        // Tag the page flux for tracing, then propagate paging state from the caller's options.
        fluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
        setContinuationTokenAndMaxItemCount(fluxOptions, options);
        return getDocClientWrapper()
            .readDatabases(options)
            .map(response -> BridgeInternal.createFeedResponse(
                ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
                response.getResponseHeaders()));
    });
}
/**
 * Reads all databases in the account with default query options.
 * <p>
 * The operation runs only after subscription. The returned {@link CosmosPagedFlux} emits one or
 * more feed-response pages of the read databases, or signals an error on failure.
 *
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
    CosmosQueryRequestOptions defaultOptions = new CosmosQueryRequestOptions();
    return readAllDatabases(defaultOptions);
}
/**
 * Queries the account's databases with a SQL query string.
 * <p>
 * The operation runs only after subscription. The returned {@link CosmosPagedFlux} emits one or
 * more feed-response pages of matching databases, or signals an error on failure.
 *
 * @param query the query.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
    // Lift the raw query text into a SqlQuerySpec and hand off to the shared internal path.
    SqlQuerySpec querySpec = new SqlQuerySpec(query);
    return queryDatabasesInternal(querySpec, options);
}
/**
 * Queries the account's databases with a parameterized SQL query specification.
 * <p>
 * The operation runs only after subscription. The returned {@link CosmosPagedFlux} emits one or
 * more feed-response pages of matching databases, or signals an error on failure.
 *
 * @param querySpec the SQL query specification.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
    // Thin public wrapper over the shared internal query path.
    return this.queryDatabasesInternal(querySpec, options);
}
/**
 * Obtains a client-side handle to a database; no service call is made.
 *
 * @param id name of the database.
 * @return {@link CosmosAsyncDatabase}.
 */
public CosmosAsyncDatabase getDatabase(String id) {
    CosmosAsyncDatabase database = new CosmosAsyncDatabase(id, this);
    return database;
}
/**
 * Closes this {@link CosmosAsyncClient} instance and releases its underlying resources.
 */
@Override
public void close() {
    // The document client owns all network/transport resources; closing it tears them down.
    this.asyncDocumentClient.close();
}
// Package-private accessor for the tracing provider used by this client.
TracerProvider getTracerProvider() {
    return tracerProvider;
}
// Read-then-create: attempts to read the database first; if the read fails with 404,
// creates it (applying throughputProperties when supplied). Any other error propagates.
// The whole sequence is wrapped in a single "createDatabaseIfNotExists" trace span, with
// the inner read/create marked as nested via COSMOS_CALL_DEPTH so they are not traced
// as independent top-level operations.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
// Mark downstream calls as nested so tracing attributes them to this span.
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
nestedContext).onErrorResume(exception -> {
// Reactor may wrap the service exception; unwrap before inspecting it.
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
// Database does not exist yet: fall through to creation.
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if (throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, nestedContext);
}
}
// Not a 404 (or not a CosmosException): surface the unwrapped failure to the caller.
return Mono.error(unwrappedException);
});
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
// Issues the actual create-database call against the document client and wraps the
// resulting publisher in a "createDatabase.<id>" trace span.
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    String span = "createDatabase." + database.getId();
    Mono<CosmosDatabaseResponse> response = asyncDocumentClient
        .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
        .map(ModelBridgeInternal::createCosmosDatabaseResponse)
        .single();
    return tracerProvider.traceEnabledCosmosResponsePublisher(response, context, span, database.getId(),
        this.serviceEndpoint);
}
} |
@samvaity - agreed, we can add nested / non-nested information on context always irrespective of the API if it doesn't result in a perf hit. Added it to this issue: https://github.com/Azure/azure-sdk-for-java/issues/13031 | private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
} | pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null); | private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @param throughput the throughput for the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
Mono<CosmosDatabaseResponse> createDatabase(String id, int throughput) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, ThroughputProperties.createManualThroughput(throughput));
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = createDatabaseIfNotExistsInternal(database.readInternal(new CosmosDatabaseRequestOptions(), nestedContext), database, throughputProperties, nestedContext);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(Mono<CosmosDatabaseResponse> responseMono, CosmosAsyncDatabase database, ThroughputProperties throughputProperties, Context context) {
return responseMono.onErrorResume(exception -> {
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if(throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
if (context != null) {
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, context);
}
return createDatabase(new CosmosDatabaseProperties(database.getId()),
requestOptions);
}
}
return Mono.error(unwrappedException);
});
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
Context context) {
String spanName = "createDatabase." + database.getId();
Mono<CosmosDatabaseResponse> responseMono = createDatabaseInternal(database, options);
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options) {
return asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
.map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse))
.single();
}
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final DataEncryptionKeyProvider dataEncryptionKeyProvider;
private final boolean contentResponseOnWriteEnabled;
private static final Tracer TRACER;
static {
ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
Iterator<?> iterator = serviceLoader.iterator();
if (iterator.hasNext()) {
TRACER = serviceLoader.iterator().next();
} else {
TRACER = null;
}
}
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.dataEncryptionKeyProvider = builder.getDataEncryptionKeyProvider();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(TRACER);
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withDataEncryptionKeyProvider(this.dataEncryptionKeyProvider)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
 * Create a Database if it does not already exist on the service.
 * <p>
 * The throughputProperties will only be used if the specified database
 * does not exist and therefore a new database will be created with throughputProperties;
 * they are ignored when the database already exists.
 * <p>
 * The {@link Mono} upon successful completion will contain a single cosmos database response with the
 * created or existing database.
 *
 * @param id the id of the database.
 * @param throughputProperties the throughput properties applied only when a new database is created.
 * @return a {@link Mono} containing the cosmos database response with the created or existing database or
 * an error.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
 * Creates a database.
 * <p>
 * After subscription the operation will be performed. On success the {@link Mono}
 * emits a single response carrying the created database; on failure it errors.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param options {@link CosmosDatabaseRequestOptions}; defaults are used when {@code null}.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   CosmosDatabaseRequestOptions options) {
    final CosmosDatabaseRequestOptions requestOptions =
        (options != null) ? options : new CosmosDatabaseRequestOptions();
    // Wrap the public properties in the internal Database resource type.
    Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
 * Creates a database with default request options.
 * <p>
 * After subscription the operation will be performed. On success the {@link Mono}
 * emits a single response carrying the created database; on failure it errors.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
    CosmosDatabaseRequestOptions defaultOptions = new CosmosDatabaseRequestOptions();
    return createDatabase(databaseProperties, defaultOptions);
}
/**
 * Creates a database identified only by its id, with default request options.
 * <p>
 * After subscription the operation will be performed. On success the {@link Mono}
 * emits a single response carrying the created database; on failure it errors.
 *
 * @param id id of the database.
 * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
    CosmosDatabaseProperties properties = new CosmosDatabaseProperties(id);
    return createDatabase(properties, new CosmosDatabaseRequestOptions());
}
/**
 * Creates a database with the given provisioned throughput.
 * <p>
 * After subscription the operation will be performed. On success the {@link Mono}
 * emits a single response carrying the created database; on failure it errors.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @param options {@link CosmosDatabaseRequestOptions}; defaults are used when {@code null}.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   ThroughputProperties throughputProperties,
                                                   CosmosDatabaseRequestOptions options) {
    // Use a final local rather than reassigning the parameter, matching the
    // null-handling style of createDatabase(CosmosDatabaseProperties, CosmosDatabaseRequestOptions).
    final CosmosDatabaseRequestOptions requestOptions =
        options == null ? new CosmosDatabaseRequestOptions() : options;
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
 * Creates a database with the given provisioned throughput and default request options.
 * <p>
 * After subscription the operation will be performed. On success the {@link Mono}
 * emits a single response carrying the created database; on failure it errors.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties}.
 * @param throughputProperties the throughput properties for the database.
 * @return an {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
    // Fold the throughput into fresh request options, then delegate.
    CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    return createDatabase(databaseProperties, requestOptions);
}
/**
 * Creates a database identified by id, provisioned with the given throughput.
 *
 * @param id the id of the database.
 * @param throughputProperties the throughput properties for the database.
 * @return a {@link Mono} containing the single cosmos database response with the created database or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
    // Delegate to the (properties, throughput) overload, which applies the
    // throughput to fresh request options exactly as this method used to.
    return createDatabase(new CosmosDatabaseProperties(id), throughputProperties);
}
/**
 * Reads all databases.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param options {@link CosmosQueryRequestOptions}
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
// Register tracing metadata for the span; no database id applies to a cross-database read.
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
// Propagate any caller-supplied continuation token / page size into the options.
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
// Map each internal (v2) feed page into the public CosmosDatabaseProperties page type.
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
 * Reads all databases using default query request options.
 * <p>
 * After subscription the operation will be performed. The {@link CosmosPagedFlux}
 * emits one or more feed-response pages of databases, or errors on failure.
 *
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
    CosmosQueryRequestOptions defaultOptions = new CosmosQueryRequestOptions();
    return readAllDatabases(defaultOptions);
}
/**
 * Query for databases with a raw SQL query string.
 * <p>
 * After subscription the operation will be performed. The {@link CosmosPagedFlux}
 * emits one or more feed-response pages of matching databases, or errors on failure.
 *
 * @param query the query.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
    SqlQuerySpec querySpec = new SqlQuerySpec(query);
    return queryDatabasesInternal(querySpec, options);
}
/**
 * Query for databases using a parameterized SQL query specification.
 * <p>
 * After subscription the operation will be performed.
 * The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
 * In case of failure the {@link CosmosPagedFlux} will error.
 *
 * @param querySpec the SQL query specification.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
return queryDatabasesInternal(querySpec, options);
}
/**
 * Gets a database object without making a service call.
 * <p>
 * The returned handle is a pure client-side reference; the database is not
 * validated to exist until an operation is invoked on it.
 *
 * @param id name of the database.
 * @return {@link CosmosAsyncDatabase}.
 */
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
 * Close this {@link CosmosAsyncClient} instance and cleans up the resources.
 * Delegates to the underlying async document client's close.
 */
@Override
public void close() {
asyncDocumentClient.close();
}
// Package-private accessor for the tracer provider wired into this client.
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
// Implements create-if-not-exists as read-then-create: first attempt to read the
// database; if the read fails with 404 NOT FOUND, fall back to creating it.
// Any other error is propagated unchanged. The whole sequence is reported as a
// single "createDatabaseIfNotExists.<id>" tracing span; the nested read/create
// calls are marked with COSMOS_CALL_DEPTH so they do not open their own spans.
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
nestedContext).onErrorResume(exception -> {
// Reactor may wrap the real exception; unwrap before inspecting it.
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
// Database absent: create it, applying throughput only when supplied.
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if (throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, nestedContext);
}
}
// Not a 404 (or not a CosmosException): surface the original failure.
return Mono.error(unwrappedException);
});
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
// Issues the actual create-database call and wraps it in a tracing span named
// "createDatabase.<id>". The internal resource response is mapped to the public
// CosmosDatabaseResponse type; single() asserts exactly one emission.
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    String spanName = "createDatabase." + database.getId();
    Mono<CosmosDatabaseResponse> responseMono =
        asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
            // Method reference instead of the redundant x -> f(x) lambda.
            .map(ModelBridgeInternal::createCosmosDatabaseResponse)
            .single();
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
} |
Does this mean we are expecting there to be 5 spans for a single `readItem`? | public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
} | Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(), | public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
} | class CosmosTracerTest extends TestSuiteBase {
// Id of the document created, read, and deleted by the container tracing tests.
private static final String ITEM_ID = "tracerDoc";
// Shared client and test resources, initialized once in beforeClass().
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
// Build a direct-mode async client against the configured test endpoint and
// pick up the suite-shared database and multi-partition container.
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
// Spy on the tracer provider so each client-level API call can be verified
// to start exactly one span with the expected name and attributes.
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
// Capture the Context produced by startSpan so attribute checks can target it.
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// 1st span: createDatabaseIfNotExists.
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// 2nd span: readAllDatabases (no database id attribute applies).
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
// 3rd span: queryDatabases.
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
// Spy on the tracer provider so each database-level API call can be verified
// to start exactly one span with the expected name and attributes.
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// 1st span: createContainerIfNotExists.
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// 2nd span: readThroughput. The failure is intentionally ignored: the span is
// expected to be started even when the database has no dedicated throughput.
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
// 3rd span: readAllUsers.
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
// 4th span: readAllContainers.
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
}
// Fix: @Test is not a repeatable annotation, so the duplicated
// "@Test(groups = {"emulator"}, timeOut = TIMEOUT)" line was a compile error;
// only one instance is kept. Behavior of the test body is unchanged.
//
// Verifies that every scripts API (stored procedures, triggers, UDFs) starts
// exactly one tracing span per call, with the expected span name/attributes.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
    Tracer mockTracer = Mockito.mock(Tracer.class);
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    // Spans 1-3: read-all for each script kind.
    cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 1);
    cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 2);
    cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 3);
    // Spans 4-8: UDF create / read / replace / read-all / delete.
    CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
        getCosmosUserDefinedFunctionProperties();
    CosmosUserDefinedFunctionProperties resultUdf =
        cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 4);
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 5);
    // NOTE(review): setBody is applied to cosmosUserDefinedFunctionProperties but the
    // replace call sends the unmodified resultUdf — confirm this is intended.
    cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
    Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 6);
    cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
    Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 8);
    // Spans 9-13: trigger create / read / replace / read-all / delete.
    CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
    CosmosTriggerProperties resultTrigger =
        cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 9);
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 10);
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
    Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 11);
    cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
    Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 13);
    // Spans 14-18: stored procedure create / read / replace / read-all / delete.
    CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
    CosmosStoredProcedureProperties resultSproc =
        cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 14);
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 15);
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
    Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 16);
    cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
    Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 18);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
// Best-effort close of the shared client; errors are suppressed by the helper.
LifeCycleUtils.closeQuietly(client);
}
// Builds a throwaway UDF definition with a random id so repeated test runs do not collide.
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
// Builds a throwaway PRE/CREATE trigger definition with a random id.
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties preCreateTrigger =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    preCreateTrigger.setTriggerOperation(TriggerOperation.CREATE);
    preCreateTrigger.setTriggerType(TriggerType.PRE);
    return preCreateTrigger;
}
// Builds a throwaway stored-procedure definition with a random id.
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
// Stubs the mock tracer's start() to return an empty Context and wraps it in the
// single-element list shape that TracerProvider expects.
private List<Tracer> getMockTracer(Tracer tracer) {
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
    List<Tracer> tracers = new ArrayList<>();
    tracers.add(tracer);
    return tracers;
}
// Asserts the span attributes recorded for the most recent API call: the shared
// db.type / db.url attributes must have been set once per span so far
// (numberOfTimesCalledWithinTest), db.instance only when a database applies.
// DB_STATEMENT is checked with times(1) because each methodName is unique within
// a test — NOTE(review): confirm this stays true if a test repeats an operation.
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
// Mockito Answer that lets the spied TracerProvider.startSpan run for real while
// capturing the Context it returns, so tests can assert attributes on that Context.
private class TracerProviderCapture implements Answer<Context> {
// Last Context returned by startSpan (null until the first call).
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
// Delegate to the real startSpan and remember its result.
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
// Build a direct-mode async client against the account configured in TestConfigurations.
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
// Reuse the shared database/container fixtures provided by TestSuiteBase.
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
/**
 * Verifies that client-level operations (create/read-all/query databases) each start
 * exactly one tracing span with the expected span name and DB attributes.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
// Spy on the real provider so startSpan executes for real but can still be verified.
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
// Expected cumulative number of startSpan invocations; incremented after each SDK call.
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
// Context produced by the real startSpan; attribute verifications are matched against it.
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// readAllDatabases is not database-scoped, hence databaseName == null.
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
/**
 * Verifies that database-level operations (create container, read-all users/containers,
 * read throughput) each start one tracing span with the expected attributes.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Expected cumulative number of startSpan invocations; incremented after each SDK call.
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// readThroughput may throw (presumably when the shared database has no dedicated
// throughput — TODO confirm); the span must then carry the matching error attributes.
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
}
/**
 * Verifies that every scripts operation (stored procedures, triggers, UDFs) on the
 * container starts exactly one tracing span with the expected span name and attributes.
 *
 * <p>Fix: the {@code @Test} annotation was declared twice on this method. TestNG's
 * {@code @Test} is not a repeatable annotation, so the duplicate declaration is a
 * compile error; exactly one annotation is kept. The test logic is unchanged.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
    Tracer mockTracer = getMockTracer();
    // Spy on the real provider so startSpan executes for real but can still be verified.
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Capture the Context the real startSpan produces so attribute calls can be matched to it.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    // Expected cumulative number of startSpan invocations; incremented after each SDK call.
    int traceApiCounter = 1;

    cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    // UDF lifecycle: create, read, replace, list, delete.
    CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
        getCosmosUserDefinedFunctionProperties();
    CosmosUserDefinedFunctionProperties resultUdf =
        cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // This readAll only gets its span count verified, not its attributes.
    cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    // Trigger lifecycle: create, read, replace, list, delete.
    CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
    CosmosTriggerProperties resultTrigger =
        cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    // Stored procedure lifecycle: create, read, replace, list, delete.
    CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
    CosmosStoredProcedureProperties resultSproc =
        cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // readAll + delete each start one span: the counter was bumped once above for the
    // un-verified readAll call and is bumped once more before the final verification.
    cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
    traceApiCounter++;
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
/**
 * Verifies that a failing data-plane call (readItem with a wrong partition key) records
 * error attributes on its span, while a successful createItem records none.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
// Expected cumulative number of startSpan invocations; incremented after each SDK call.
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Successful call: errorType == null, so no error attributes may be present on the span.
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
// Deliberately read with a partition key that does not match the stored document.
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Failed call: the span must carry ERROR_TYPE == CosmosException's class name.
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
// Release the shared client; closeQuietly suppresses any exception during shutdown.
LifeCycleUtils.closeQuietly(client);
}
/** Builds a UDF definition with a random id and a trivial JS body. */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/** Builds a PRE-create trigger definition with a random id and a trivial JS body. */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties triggerDefinition =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    triggerDefinition.setTriggerType(TriggerType.PRE);
    triggerDefinition.setTriggerOperation(TriggerOperation.CREATE);
    return triggerDefinition;
}
/** Builds a stored-procedure definition with a random id and a trivial JS body. */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/** Creates a mock Tracer whose start(...) hands back an empty Context for every span. */
private Tracer getMockTracer() {
    Tracer tracer = Mockito.mock(Tracer.class);
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
    return tracer;
}
/**
 * Asserts the standard Cosmos tracing attributes were recorded for a span, and that
 * error attributes are present exactly when {@code errorType} is non-null.
 *
 * @param mockTracer the mocked tracer the attributes were written to
 * @param methodName expected DB_STATEMENT (span name) value for this single call
 * @param context the Context captured from startSpan
 * @param databaseName expected DB_INSTANCE value, or null for non-database-scoped calls
 * @param numberOfTimesCalledWithinTest cumulative attribute-call count expected so far in the test
 * @param errorType fully-qualified exception class name, or null if the call succeeded
 */
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
// DB_INSTANCE is only emitted for database-scoped operations.
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
// The span name is unique per call, so DB_STATEMENT is expected exactly once.
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
if (errorType == null) {
// Successful call: no error attributes may have been written for this context.
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Matchers.anyString(), Mockito.eq(context));
} else {
// Failed call: ERROR_TYPE must match the thrown exception's class name exactly.
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Mockito.eq(errorType), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
}
}
/** Mockito Answer that runs the real startSpan and records the Context it produced. */
private class TracerProviderCapture implements Answer<Context> {
    private Context capturedContext = null;

    @Override
    public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
        capturedContext = (Context) invocationOnMock.callRealMethod();
        return capturedContext;
    }

    public Context getResult() {
        return capturedContext;
    }
}
} |
I thought we got rid of the span name `readUDF` and should be using the same public API name for user -API visibility concerns. | public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
} | verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context, | public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
} | class CosmosTracerTest extends TestSuiteBase {
// Well-known id of the document created, read, upserted and deleted by the item-level tests.
private static final String ITEM_ID = "tracerDoc";
// Shared emulator client plus the database/container it resolves; initialized once in beforeClass().
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
// One-time setup: build a direct-mode async client against the test endpoint and
// resolve the shared database and multi-partition container used by every test.
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
    CosmosClientBuilder builder = new CosmosClientBuilder()
        .endpoint(TestConfigurations.HOST)
        .key(TestConfigurations.MASTER_KEY)
        .directMode(DirectConnectionConfig.getDefaultConfig());
    client = builder.buildAsyncClient();
    cosmosAsyncDatabase = getSharedCosmosDatabase(client);
    cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
// Verifies that each client-level operation (createDatabaseIfNotExists, readAllDatabases,
// queryDatabases) starts exactly one tracing span and records the expected span attributes.
// The literal counts passed to Mockito.times(...) are cumulative across the test body.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
    Tracer mockTracer = Mockito.mock(Tracer.class);
    // Spy on a real TracerProvider so the production startSpan logic still runs while
    // invocations remain countable.
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Captures the Context returned by startSpan so attribute verification can match on it.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
    Context context = tracerProviderCapture.getResult();
    // 1st traced call: createDatabaseIfNotExists.
    Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), 1);
    client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
    // 2nd traced call: readAllDatabases — no database-scoped attribute, hence the null databaseName.
    Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
    String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
    client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
    // 3rd traced call: queryDatabases.
    Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
// Verifies span creation and attributes for database-level operations
// (createContainerIfNotExists, readThroughput, readAllUsers, readAllContainers).
// Mockito.times(...) counts are cumulative across the test body.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
    Tracer mockTracer = Mockito.mock(Tracer.class);
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Captures the Context returned by startSpan for later attribute verification.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
        "/pk", 5000).block();
    Context context = tracerProviderCapture.getResult();
    // 1st traced call: createContainerIfNotExists.
    Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 1);
    // readThroughput may fail on the shared database (no dedicated offer); the span must
    // still be started, so the exception is deliberately ignored here.
    try {
        cosmosAsyncDatabase.readThroughput().block();
    } catch (CosmosException ex) {
    }
    // 2nd traced call: readThroughput.
    Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), 2);
    cosmosAsyncDatabase.readAllUsers().byPage().single().block();
    // 3rd traced call: readAllUsers.
    Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), 3);
    cosmosAsyncDatabase.readAllContainers().byPage().single().block();
    // 4th traced call: readAllContainers.
    Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), 4);
}
// Verifies span creation and attributes for container-level operations: read,
// readThroughput, createItem, upsertItem, readItem, deleteItem, readAllItems, queryItems.
// Mockito.times(...) counts are cumulative across the test body.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
    Tracer mockTracer = Mockito.mock(Tracer.class);
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Captures the Context returned by startSpan for later attribute verification.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    cosmosAsyncContainer.read().block();
    Context context = tracerProviderCapture.getResult();
    // 1st traced call: read container.
    Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 1);
    // readThroughput may fail on the shared container; the span must still be started,
    // so the exception is deliberately ignored.
    try {
        cosmosAsyncContainer.readThroughput().block();
    } catch (CosmosException ex) {
    }
    // 2nd traced call: readThroughput.
    Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 2);
    InternalObjectNode item = new InternalObjectNode();
    item.setId(ITEM_ID);
    cosmosAsyncContainer.createItem(item).block();
    // 3rd traced call: createItem.
    Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 3);
    cosmosAsyncContainer.upsertItem(item,
        new CosmosItemRequestOptions()).block();
    // 4th traced call: upsertItem.
    Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 4);
    cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
        InternalObjectNode.class).block();
    // 5th traced call: readItem.
    Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 5);
    cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
    // 6th traced call: deleteItem.
    Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 6);
    cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    // 7th traced call: readAllItems.
    Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 7);
    String query = "select * from c where c.id = '" + ITEM_ID + "'";
    cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    // 8th traced call: queryItems.
    Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 8);
}
// Tear-down: release the shared client once every test in this class has finished.
// FIX: the method was annotated with both @Test and @AfterClass; the stray @Test made
// TestNG run tear-down as a test case in its own right (closing the shared client as
// part of the test run and reporting tear-down in the results). Lifecycle methods must
// carry only the lifecycle annotation.
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
    LifeCycleUtils.closeQuietly(client);
}
// Builds a throwaway UDF definition with a random id and a trivial function body.
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
// Builds a throwaway pre-create trigger definition with a random id.
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties result =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    result.setTriggerOperation(TriggerOperation.CREATE);
    result.setTriggerType(TriggerType.PRE);
    return result;
}
// Builds a throwaway stored-procedure definition with a random id and a trivial body.
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
// Stubs the given tracer so start(..) always yields Context.NONE, then wraps it in a
// single-element list as TracerProvider expects.
private List<Tracer> getMockTracer(Tracer tracer) {
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
    List<Tracer> tracers = new ArrayList<>();
    tracers.add(tracer);
    return tracers;
}
// Asserts the span attributes set on the mock tracer for the most recent traced call.
// DB_INSTANCE / DB_TYPE / DB_URL are set once per call, so their expected invocation
// count equals the cumulative number of traced calls so far; DB_STATEMENT carries the
// per-call method name and is therefore expected exactly once per distinct methodName.
// databaseName may be null for operations that are not scoped to a database.
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
    int numberOfTimesCalledWithinTest) {
    if (databaseName != null) {
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
            databaseName, context);
    }
    Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
        TracerProvider.DB_TYPE_VALUE, context);
    Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
        TestConfigurations.HOST,
        context);
    Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
// Mockito Answer that lets the real startSpan run while remembering the Context it
// returned, so tests can verify attributes against that exact Context instance.
private class TracerProviderCapture implements Answer<Context> {
    private Context result;

    // Context produced by the most recent startSpan invocation (null before any call).
    public Context getResult() {
        return result;
    }

    @Override
    public Context answer(InvocationOnMock invocation) throws Throwable {
        result = (Context) invocation.callRealMethod();
        return result;
    }
}
} | class CosmosTracerTest extends TestSuiteBase {
// Well-known id of the document created, read, upserted and deleted by the item-level tests.
private static final String ITEM_ID = "tracerDoc";
// Shared emulator client plus the database/container it resolves; initialized once in beforeClass().
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
// One-time setup: build a direct-mode async client against the test endpoint and
// resolve the shared database and multi-partition container used by every test.
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
    CosmosClientBuilder builder = new CosmosClientBuilder()
        .endpoint(TestConfigurations.HOST)
        .key(TestConfigurations.MASTER_KEY)
        .directMode(DirectConnectionConfig.getDefaultConfig());
    client = builder.buildAsyncClient();
    cosmosAsyncDatabase = getSharedCosmosDatabase(client);
    cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
// Verifies that each client-level operation starts exactly one tracing span and records
// the expected attributes. traceApiCounter tracks the cumulative number of traced calls,
// incremented after each verified operation.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
    Tracer mockTracer = getMockTracer();
    // Spy on a real TracerProvider so production startSpan logic still runs while
    // invocations remain countable.
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    int traceApiCounter = 1;
    // Captures the Context returned by startSpan for later attribute verification.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // readAllDatabases is not database-scoped, hence the null databaseName.
    client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
    traceApiCounter++;
    String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
    client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
// Verifies span creation and attributes for database-level operations. traceApiCounter
// tracks the cumulative number of traced calls. readThroughput is exercised last so
// its possible CosmosException can be asserted as an error attribute on the span.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
    Tracer mockTracer = getMockTracer();
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Captures the Context returned by startSpan for later attribute verification.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    int traceApiCounter = 1;
    cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
        "/pk", 5000).block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncDatabase.readAllUsers().byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncDatabase.readAllContainers().byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // readThroughput may fail on the shared database; the exception type is recorded so
    // the span's error attributes can be verified.
    String errorType = null;
    try {
        cosmosAsyncDatabase.readThroughput().block();
    } catch (CosmosException ex) {
        errorType = ex.getClass().getName();
    }
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
}
// Verifies span creation and attributes for container-level operations: read,
// readThroughput, createItem, upsertItem, readItem, deleteItem, readAllItems, queryItems.
// traceApiCounter tracks the cumulative number of traced calls.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
    Tracer mockTracer = getMockTracer();
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Captures the Context returned by startSpan for later attribute verification.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    int traceApiCounter = 1;
    cosmosAsyncContainer.read().block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // readThroughput may fail on the shared container; the span must still be started,
    // so the exception is deliberately ignored here.
    try {
        cosmosAsyncContainer.readThroughput().block();
    } catch (CosmosException ex) {
    }
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    InternalObjectNode item = new InternalObjectNode();
    item.setId(ITEM_ID);
    cosmosAsyncContainer.createItem(item).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.upsertItem(item,
        new CosmosItemRequestOptions()).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
        InternalObjectNode.class).block().getItem();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    String query = "select * from c where c.id = '" + ITEM_ID + "'";
    cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
}
// Verifies that a traced call which fails (readItem with the wrong partition key) records
// the exception type and message as error attributes on its span, while a preceding
// successful createItem records none.
// FIX: the method carried a duplicated @Test annotation; TestNG's @Test is not a
// repeatable annotation, so the duplicate is a compile error. One copy removed.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
    Tracer mockTracer = getMockTracer();
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    int traceApiCounter = 1;
    // Captures the Context returned by startSpan for later attribute verification.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    InternalObjectNode item = new InternalObjectNode();
    item.setId("testDoc");
    cosmosAsyncContainer.createItem(item).block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // Reading with a wrong partition key must surface a 404 and tag the span with the
    // exception's class name.
    String errorType = null;
    try {
        PartitionKey partitionKey = new PartitionKey("wrongPk");
        cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
        fail("readItem should fail due to wrong pk");
    } catch (CosmosException ex) {
        assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
        errorType = ex.getClass().getName();
    }
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
}
// Tear-down: release the shared client once every test in this class has finished.
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
    LifeCycleUtils.closeQuietly(client);
}
// Builds a throwaway UDF definition with a random id and a trivial function body.
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
// Builds a throwaway pre-create trigger definition with a random id.
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties result =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    result.setTriggerOperation(TriggerOperation.CREATE);
    result.setTriggerType(TriggerType.PRE);
    return result;
}
// Builds a throwaway stored-procedure definition with a random id and a trivial body.
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
// Creates a Mockito tracer whose start(..) always yields Context.NONE.
private Tracer getMockTracer() {
    Tracer tracer = Mockito.mock(Tracer.class);
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
    return tracer;
}
// Asserts the span attributes set on the mock tracer for the most recent traced call.
// DB_INSTANCE / DB_TYPE / DB_URL are set once per call, so their expected invocation
// count equals the cumulative number of traced calls so far; DB_STATEMENT carries the
// per-call method name and is expected exactly once per distinct methodName.
// databaseName may be null for operations that are not scoped to a database; errorType,
// when non-null, is the exception class name expected in the span's error attributes.
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
    int numberOfTimesCalledWithinTest, String errorType) {
    if (databaseName != null) {
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
            databaseName, context);
    }
    Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
        TracerProvider.DB_TYPE_VALUE, context);
    Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
        TestConfigurations.HOST,
        context);
    Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
    if (errorType == null) {
        // Successful calls must not carry error attributes.
        Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
            , Matchers.anyString(), Mockito.eq(context));
        Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
            , Matchers.anyString(), Mockito.eq(context));
    } else {
        // Failed calls must record the exception class name and some error message.
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
            , Mockito.eq(errorType), Mockito.eq(context));
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
            , Matchers.anyString(), Mockito.eq(context));
    }
}
// Mockito Answer that lets the real startSpan run while remembering the Context it
// returned, so tests can verify attributes against that exact Context instance.
private class TracerProviderCapture implements Answer<Context> {
    private Context result;

    // Context produced by the most recent startSpan invocation (null before any call).
    public Context getResult() {
        return result;
    }

    @Override
    public Context answer(InvocationOnMock invocation) throws Throwable {
        result = (Context) invocation.callRealMethod();
        return result;
    }
}
} |
Suggestion can be updated to check for the span name here ``` Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(eq("createContainerIfNotExists"), Matchers.anyString() ```` | public void cosmosAsyncDatabase() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
} | Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(), | public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
} | class CosmosTracerTest extends TestSuiteBase {
// Well-known id of the document created, read, upserted and deleted by the item-level tests.
private static final String ITEM_ID = "tracerDoc";
// Shared emulator client plus the database/container it resolves; initialized once in beforeClass().
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
// One-time setup: build a direct-mode async client against the test endpoint and
// resolve the shared database and multi-partition container used by every test.
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
    CosmosClientBuilder builder = new CosmosClientBuilder()
        .endpoint(TestConfigurations.HOST)
        .key(TestConfigurations.MASTER_KEY)
        .directMode(DirectConnectionConfig.getDefaultConfig());
    client = builder.buildAsyncClient();
    cosmosAsyncDatabase = getSharedCosmosDatabase(client);
    cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
// Verifies that each client-level operation (createDatabaseIfNotExists, readAllDatabases,
// queryDatabases) starts exactly one tracing span and records the expected span attributes.
// The literal counts passed to Mockito.times(...) are cumulative across the test body.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
    Tracer mockTracer = Mockito.mock(Tracer.class);
    // Spy on a real TracerProvider so the production startSpan logic still runs while
    // invocations remain countable.
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Captures the Context returned by startSpan so attribute verification can match on it.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
    Context context = tracerProviderCapture.getResult();
    // 1st traced call: createDatabaseIfNotExists.
    Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
        cosmosAsyncDatabase.getId(), 1);
    client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
    // 2nd traced call: readAllDatabases — no database-scoped attribute, hence the null databaseName.
    Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
    String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
    client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
    // 3rd traced call: queryDatabases.
    Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private List<Tracer> getMockTracer(Tracer tracer) {
List<Tracer> tracerList = new ArrayList<>();
tracerList.add(tracer);
Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return tracerList;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private Tracer getMockTracer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
Mockito.when(mockTracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return mockTracer;
}
    /**
     * Asserts that the mocked {@link Tracer} received the expected span attributes.
     *
     * @param mockTracer the Mockito mock the span attributes were written to
     * @param methodName expected DB_STATEMENT attribute value (unique per operation)
     * @param context the Context captured from {@code TracerProvider#startSpan}
     * @param databaseName expected DB_INSTANCE value, or null for operations that
     *                     are not scoped to a database
     * @param numberOfTimesCalledWithinTest cumulative number of spans started so far
     *                                      in the calling test
     * @param errorType fully-qualified exception class name when the operation
     *                  failed, or null on success
     */
    private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
                                        int numberOfTimesCalledWithinTest, String errorType) {
        // DB_INSTANCE is only recorded for database-scoped operations.
        if (databaseName != null) {
            Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
                databaseName, context);
        }
        // DB_TYPE and DB_URL are written on every span, so their verification
        // counts track the running total of spans started in the test.
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
            TracerProvider.DB_TYPE_VALUE, context);
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
            TestConfigurations.HOST,
            context);
        // The statement attribute value is unique per operation, hence exactly once.
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
        if (errorType == null) {
            // Successful call: no error attributes may have been written for this context.
            Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
                , Matchers.anyString(), Mockito.eq(context));
            Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
                , Matchers.anyString(), Mockito.eq(context));
        } else {
            // Failed call: exactly one error type/message pair is expected.
            Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
                , Mockito.eq(errorType), Mockito.eq(context));
            Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
                , Matchers.anyString(), Mockito.eq(context));
        }
    }
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
Mockito does not work with partial matching; it will give a runtime error, and anyway we are checking the span name on the mock tracer, so this is not needed | public void cosmosAsyncDatabase() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
} | Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(), | public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
    /**
     * Verifies client-level operations (create database, read-all, query) each
     * start exactly one tracing span carrying the expected attributes.
     */
    @Test(groups = {"emulator"}, timeOut = TIMEOUT)
    public void cosmosAsyncClient() {
        Tracer mockTracer = Mockito.mock(Tracer.class);
        TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
        ReflectionUtils.setTracerProvider(client, tracerProvider);
        // Capture the Context produced by the real startSpan so attribute
        // verifications can target that exact context.
        TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
        Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
        Context context = tracerProviderCapture.getResult();
        Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
            cosmosAsyncDatabase.getId(), 1);
        client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        // readAllDatabases is not database-scoped, hence null DB_INSTANCE.
        verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
        String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
        client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
    }
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
}
    /**
     * Verifies scripts operations (UDF, trigger, stored-procedure CRUD and
     * read-all) each start exactly one tracing span with the expected
     * attributes; the verification count is cumulative across the test.
     */
    @Test(groups = {"emulator"}, timeOut = TIMEOUT)
    public void cosmosAsyncScripts() {
        Tracer mockTracer = Mockito.mock(Tracer.class);
        TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
        ReflectionUtils.setTracerProvider(client, tracerProvider);
        // Capture the Context produced by the real startSpan for attribute checks.
        TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
        Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
        Context context = tracerProviderCapture.getResult();
        Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 1);
        cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 2);
        cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 3);
        // ---- user-defined function lifecycle (create, read, replace, list, delete) ----
        CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
            getCosmosUserDefinedFunctionProperties();
        CosmosUserDefinedFunctionProperties resultUdf =
            cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
        Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 4);
        cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
        Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 5);
        cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
        cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
        Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 6);
        // Span 7 (readAll) only checks the call count, not the attributes.
        cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
        Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 8);
        // ---- trigger lifecycle ----
        CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
        CosmosTriggerProperties resultTrigger =
            cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
        Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 9);
        cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
        Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 10);
        cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
        Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 11);
        // Span 12 (readAll) only checks the call count, not the attributes.
        cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
        Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 13);
        // ---- stored-procedure lifecycle ----
        CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
        CosmosStoredProcedureProperties resultSproc =
            cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
        Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 14);
        cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
        Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 15);
        cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
        Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 16);
        // readAll starts span 17 (unverified); the delete below is span 18.
        cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
        cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
        Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 18);
    }
    @AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
    public void afterClass() {
        // Tear down the shared async client once all tests in this class have run.
        // closeQuietly presumably suppresses close() failures so teardown never
        // fails the suite — TODO confirm against LifeCycleUtils.
        LifeCycleUtils.closeQuietly(client);
    }
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private List<Tracer> getMockTracer(Tracer tracer) {
List<Tracer> tracerList = new ArrayList<>();
tracerList.add(tracer);
Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return tracerList;
}
    /**
     * Asserts that the mocked {@link Tracer} received the expected span attributes.
     *
     * @param mockTracer the Mockito mock the span attributes were written to
     * @param methodName expected DB_STATEMENT attribute value (unique per operation)
     * @param context the Context captured from {@code TracerProvider#startSpan}
     * @param databaseName expected DB_INSTANCE value, or null for operations that
     *                     are not scoped to a database
     * @param numberOfTimesCalledWithinTest cumulative number of spans started so far
     *                                      in the calling test
     */
    private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
                                        int numberOfTimesCalledWithinTest) {
        // DB_INSTANCE is only recorded for database-scoped operations.
        if (databaseName != null) {
            Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
                databaseName, context);
        }
        // DB_TYPE and DB_URL are written on every span, so their verification
        // counts track the running total of spans started in the test.
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
            TracerProvider.DB_TYPE_VALUE, context);
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
            TestConfigurations.HOST,
            context);
        // The statement attribute value is unique per operation, hence exactly once.
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
    }
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
}
// Verifies that each scripts-API operation (stored procedures, triggers, UDFs)
// starts exactly one tracing span and records the expected span attributes.
// traceApiCounter tracks the cumulative number of expected startSpan invocations;
// the pattern is: perform operation -> verify(times(counter)) -> counter++.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
// Spy the real provider so startSpan runs for real but can be verified/captured.
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
// --- read-all listings: one span each ---
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
// Context returned by the first startSpan; reused for all attribute checks below.
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// --- UDF lifecycle: create / read / replace / list / delete ---
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// This listing only checks the span count, not the attributes.
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// --- trigger lifecycle ---
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Span-count-only check for the listing, as above.
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// --- stored-procedure lifecycle ---
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Two operations happen before the next verification (the listing's span is
// accounted for because the counter was already one ahead of the last verify).
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
// Verifies that a failed operation still produces a span and that the span is
// annotated with the error type of the thrown CosmosException.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// First create a document successfully: one span with no error attributes.
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Now force a 404 by reading with a wrong partition key and capture the
// exception class name, which must be recorded on the span.
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
// Releases the shared async client after all tests in this class have run.
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
/**
 * Builds a fresh UDF definition with a unique id and a trivial function body.
 */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Builds a fresh pre-create trigger definition with a unique id and a trivial body.
 */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties triggerDefinition =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    triggerDefinition.setTriggerType(TriggerType.PRE);
    triggerDefinition.setTriggerOperation(TriggerOperation.CREATE);
    return triggerDefinition;
}
/**
 * Builds a fresh stored-procedure definition with a unique id and a trivial body.
 */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Creates a Tracer mock whose {@code start(...)} always returns the root context,
 * so spans can be started without a real tracing backend.
 */
private Tracer getMockTracer() {
    Tracer tracer = Mockito.mock(Tracer.class);
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class)))
        .thenReturn(Context.NONE);
    return tracer;
}
// Asserts that the mock tracer received the expected span attributes.
// numberOfTimesCalledWithinTest is the cumulative count of spans started so far
// in the current test (shared attributes are set once per span); the
// DB_STATEMENT attribute is unique per method name, so it is expected exactly once.
// errorType == null means the operation succeeded and no error attributes may
// have been recorded; otherwise the error type/message must appear exactly once.
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
if (errorType == null) {
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Matchers.anyString(), Mockito.eq(context));
} else {
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Mockito.eq(errorType), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
}
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
Not sure what you mean; here is an example of what I am suggesting: https://github.com/Azure/azure-sdk-for-java/blob/master/sdk/eventhubs/azure-messaging-eventhubs/src/test/java/com/azure/messaging/eventhubs/EventHubProducerAsyncClientTest.java#L426 | public void cosmosAsyncDatabase() {
// Body of cosmosAsyncDatabase (signature on the preceding row boundary):
// checks that each database-level operation starts one span with the expected
// attributes, using hard-coded cumulative span counts (1..4).
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// readThroughput may fail on this database; only the span emission matters here.
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
} | Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(), | public void cosmosAsyncDatabase() {
// Body of the revised cosmosAsyncDatabase (signature on the preceding row
// boundary): same checks as before, but with a running traceApiCounter instead
// of hard-coded counts, and the failing readThroughput moved last so its error
// type can be asserted on the span.
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Capture the exception class name if readThroughput fails so the span's
// error attributes can be verified (null means the call succeeded).
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
// Builds the direct-mode async client against the emulator and resolves the
// shared database/container used by every test in this class.
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
// Verifies client-level operations each start one span with expected attributes.
// Cumulative span counts (1..3) are hard-coded; databaseName is null for the
// cross-database operations, which carry no DB_INSTANCE attribute.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
// Verifies container-level item operations each start one span with expected
// attributes, using hard-coded cumulative span counts (1..8).
// Fix: the @Test annotation was duplicated; TestNG's @Test is not repeatable,
// so a second identical annotation on the same method does not compile.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.read().block();
// Context returned by the first startSpan; reused for all attribute checks.
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// readThroughput may fail on the shared container; only span emission matters.
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
// best-effort call; failure is acceptable for this tracing check
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
// Item CRUD: create, upsert, read, delete, list, query — one span each.
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
}
// Verifies scripts-API operations (sprocs, triggers, UDFs) each start one span
// with expected attributes. Cumulative span counts (1..18) are hard-coded;
// some read-all listings only verify the span count, not the attributes.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
// Context returned by the first startSpan; reused for all attribute checks.
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
// --- UDF lifecycle ---
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
// Span-count-only check for this listing.
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
// --- trigger lifecycle ---
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
// Span-count-only check for this listing.
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
// --- stored-procedure lifecycle ---
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
// Two operations before the final verification, hence the jump from 16 to 18.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
}
// Releases the shared async client after all tests in this class have run.
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
/**
 * Builds a fresh UDF definition with a unique id and a trivial function body.
 */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Builds a fresh pre-create trigger definition with a unique id and a trivial body.
 */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties triggerDefinition =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    triggerDefinition.setTriggerType(TriggerType.PRE);
    triggerDefinition.setTriggerOperation(TriggerOperation.CREATE);
    return triggerDefinition;
}
/**
 * Builds a fresh stored-procedure definition with a unique id and a trivial body.
 */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Stubs the given Tracer mock so {@code start(...)} always returns the root
 * context, then wraps it in a single-element mutable list for TracerProvider.
 */
private List<Tracer> getMockTracer(Tracer tracer) {
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class)))
        .thenReturn(Context.NONE);
    List<Tracer> wrapped = new ArrayList<>();
    wrapped.add(tracer);
    return wrapped;
}
// Asserts that the mock tracer received the expected span attributes.
// numberOfTimesCalledWithinTest is the cumulative span count so far in the
// current test (shared attributes are set once per span); DB_STATEMENT is
// unique per method name and therefore expected exactly once.
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private Tracer getMockTracer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
Mockito.when(mockTracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return mockTracer;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
if (errorType == null) {
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Matchers.anyString(), Mockito.eq(context));
} else {
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Mockito.eq(errorType), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
}
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
No there will be only 1 span for readItem. This is to verify how many times this method is called in test function. | public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
} | Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(), | public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private List<Tracer> getMockTracer(Tracer tracer) {
List<Tracer> tracerList = new ArrayList<>();
tracerList.add(tracer);
Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return tracerList;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private Tracer getMockTracer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
Mockito.when(mockTracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return mockTracer;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
if (errorType == null) {
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Matchers.anyString(), Mockito.eq(context));
} else {
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Mockito.eq(errorType), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
}
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
Do we need the `spy` if we have a mock of tracer list for a valid tracerProvider object? nit: Consider adding static import for Mockito methods. | public void cosmosAsyncDatabase() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
} | TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer))); | public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
} | class CosmosTracerTest extends TestSuiteBase {
// Fixed document id used by the item-level CRUD assertions in this suite.
private static final String ITEM_ID = "tracerDoc";
// Shared client and suite-level database/container; wired up once in beforeClass().
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
    // Direct-mode async client against the emulator endpoint configured for the suite.
    CosmosClientBuilder clientBuilder = new CosmosClientBuilder()
        .endpoint(TestConfigurations.HOST)
        .key(TestConfigurations.MASTER_KEY)
        .directMode(DirectConnectionConfig.getDefaultConfig());
    client = clientBuilder.buildAsyncClient();
    // Reuse the shared database and the shared multi-partition container.
    cosmosAsyncDatabase = getSharedCosmosDatabase(client);
    cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
/**
 * Verifies that each client-level operation (createDatabaseIfNotExists,
 * readAllDatabases, queryDatabases) starts exactly one tracing span and
 * tags it with the expected attributes. The literal counts passed to
 * Mockito.times(..) track the cumulative number of startSpan calls.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
// Spy on a TracerProvider backed by a mock Tracer and inject it into the client.
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
// Capture the Context returned by the real startSpan so attribute
// verification below can match the exact instance.
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// 1st span: createDatabaseIfNotExists.
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// 2nd span: readAllDatabases (no database-instance attribute expected -> null).
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
// 3rd span: queryDatabases.
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
/**
 * Verifies that each container-level operation (read, readThroughput, item CRUD,
 * readAllItems, queryItems) starts exactly one tracing span with the expected
 * attributes. The literal counts passed to Mockito.times(..) track the cumulative
 * number of startSpan invocations within this test.
 *
 * <p>Fix: the {@code @Test} annotation was declared twice on this method;
 * {@code @Test} is not a repeatable annotation, so the duplicate declaration
 * does not compile and has been removed.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
    Tracer mockTracer = Mockito.mock(Tracer.class);
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Capture the Context produced by the real startSpan for attribute matching.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    // 1: readContainer.
    cosmosAsyncContainer.read().block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 1);
    // 2: readThroughput. Best-effort: the call may throw when the container has no
    // dedicated throughput, but a span must have been started either way.
    try {
        cosmosAsyncContainer.readThroughput().block();
    } catch (CosmosException ex) {
        // intentionally ignored - only the span is asserted below
    }
    Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 2);
    // 3: createItem.
    InternalObjectNode item = new InternalObjectNode();
    item.setId(ITEM_ID);
    cosmosAsyncContainer.createItem(item).block();
    Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 3);
    // 4: upsertItem.
    cosmosAsyncContainer.upsertItem(item,
        new CosmosItemRequestOptions()).block();
    Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 4);
    // 5: readItem.
    cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
        InternalObjectNode.class).block();
    Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 5);
    // 6: deleteItem.
    cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
    Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 6);
    // 7: readAllItems.
    cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 7);
    // 8: queryItems.
    String query = "select * from c where c.id = '" + ITEM_ID + "'";
    cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 8);
}
/**
 * Verifies tracing spans for the scripts API: stored procedures, triggers and
 * user-defined functions (readAll/create/read/replace/delete). The literal
 * counts passed to Mockito.times(..) track the cumulative number of startSpan
 * invocations; a few readAll calls bump the count without attribute checks.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
// Spy on a TracerProvider backed by a mock Tracer and inject it into the client.
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Spans 1-3: readAll for stored procedures, triggers, UDFs.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
// Spans 4-8: UDF lifecycle (create, read, replace, readAll, delete).
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
// Span 7: readAll again; only the span count is asserted here.
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
// Spans 9-13: trigger lifecycle (create, read, replace, readAll, delete).
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
// Span 12: readAllTriggers; only the span count is asserted.
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
// Spans 14-18: stored-procedure lifecycle (create, read, replace, readAll, delete).
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
// Span 17 (readAll, unverified) and span 18 (delete) are checked together below.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
// Release the shared client; closeQuietly suppresses shutdown errors.
LifeCycleUtils.closeQuietly(client);
}
/** Builds a UDF definition with a random id and a trivial function body. */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/** Builds a PRE/CREATE trigger definition with a random id and a trivial body. */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties triggerDef =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    triggerDef.setTriggerType(TriggerType.PRE);
    triggerDef.setTriggerOperation(TriggerOperation.CREATE);
    return triggerDef;
}
/** Builds a stored-procedure definition with a random id and a trivial body. */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Wraps the supplied mock tracer in a one-element list (the shape
 * TracerProvider expects) after stubbing start(..) to return Context.NONE
 * so span creation never dereferences a null context.
 */
private List<Tracer> getMockTracer(Tracer tracer) {
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class)))
        .thenReturn(Context.NONE);
    List<Tracer> tracers = new ArrayList<>();
    tracers.add(tracer);
    return tracers;
}
/**
 * Asserts the span attributes recorded for one traced call.
 *
 * @param mockTracer the Mockito tracer mock whose setAttribute calls are verified
 * @param methodName expected DB_STATEMENT value (span/method name)
 * @param context the Context captured from the spied startSpan
 * @param databaseName expected DB_INSTANCE value, or null when the call has no
 *        database scope (then the DB_INSTANCE check is skipped)
 * @param numberOfTimesCalledWithinTest cumulative number of spans started so far;
 *        shared attributes (type/url) accumulate across calls, while DB_STATEMENT
 *        is unique per method name and so is expected exactly once
 */
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
// Fixed document id used by the item-level CRUD assertions in this suite.
private static final String ITEM_ID = "tracerDoc";
// Shared client and suite-level database/container; wired up once in beforeClass().
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
    // Direct-mode async client against the emulator endpoint configured for the suite.
    CosmosClientBuilder builder = new CosmosClientBuilder()
        .endpoint(TestConfigurations.HOST)
        .key(TestConfigurations.MASTER_KEY)
        .directMode(DirectConnectionConfig.getDefaultConfig());
    client = builder.buildAsyncClient();
    // Reuse the shared database and the shared multi-partition container.
    cosmosAsyncDatabase = getSharedCosmosDatabase(client);
    cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
/**
 * Verifies that each client-level operation (createDatabaseIfNotExists,
 * readAllDatabases, queryDatabases) starts exactly one tracing span and tags
 * it with the expected attributes. traceApiCounter tracks the cumulative
 * number of startSpan invocations and is incremented after each verification.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
// Spy on a TracerProvider backed by a mock Tracer and inject it into the client.
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
// Capture the Context returned by the real startSpan for attribute matching.
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Span 1: createDatabaseIfNotExists.
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Span 2: readAllDatabases (no database-instance attribute expected -> null).
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
// Span 3: queryDatabases.
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
/**
 * Verifies that each container-level operation (read, readThroughput, item CRUD,
 * readAllItems, queryItems) starts exactly one tracing span with the expected
 * attributes. traceApiCounter tracks the cumulative number of startSpan
 * invocations and is incremented after each verification.
 *
 * <p>Fixes: the {@code @Test} annotation was declared twice on this method;
 * {@code @Test} is not a repeatable annotation, so the duplicate declaration does
 * not compile and has been removed. The unused local that held the readItem
 * result was also dropped.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
    Tracer mockTracer = getMockTracer();
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    // Capture the Context produced by the real startSpan for attribute matching.
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    int traceApiCounter = 1;
    // Span 1: readContainer.
    cosmosAsyncContainer.read().block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // Span 2: readThroughput. Best-effort: the call may throw when the container
    // has no dedicated throughput, but a span must have been started either way.
    try {
        cosmosAsyncContainer.readThroughput().block();
    } catch (CosmosException ex) {
        // intentionally ignored - only the span is asserted below
    }
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // Span 3: createItem.
    InternalObjectNode item = new InternalObjectNode();
    item.setId(ITEM_ID);
    cosmosAsyncContainer.createItem(item).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // Span 4: upsertItem.
    cosmosAsyncContainer.upsertItem(item,
        new CosmosItemRequestOptions()).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // Span 5: readItem (result intentionally discarded; only tracing is asserted).
    cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
        InternalObjectNode.class).block().getItem();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // Span 6: deleteItem.
    cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // Span 7: readAllItems.
    cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
    // Span 8: queryItems.
    String query = "select * from c where c.id = '" + ITEM_ID + "'";
    cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;
}
/**
 * Verifies tracing spans for the scripts API: stored procedures, triggers and
 * user-defined functions (readAll/create/read/replace/delete). traceApiCounter
 * tracks the cumulative number of startSpan invocations; a few readAll calls
 * bump the counter without attribute verification.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
// Spy on a TracerProvider backed by a mock Tracer and inject it into the client.
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
// Spans 1-3: readAll for stored procedures, triggers, UDFs.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// UDF lifecycle: create, read, replace, readAll (count only), delete.
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Trigger lifecycle: create, read, replace, readAll (count only), delete.
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Stored-procedure lifecycle: create, read, replace, readAll (count only), delete.
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// readAll starts one more span (not verified individually), delete starts the next;
// the counter is advanced once here and both are covered by the final verify.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
/**
 * Verifies that a failing operation still starts a span and that the error
 * attributes are recorded: readItem with a wrong partition key must surface a
 * 404 CosmosException, whose class name is then expected as the span's error
 * attribute (checked via the errorType argument of verifyTracerAttributes).
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Span 1: successful createItem (no error attribute expected -> null).
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Span 2: readItem with a mismatched partition key -> expected NOTFOUND.
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private Tracer getMockTracer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
Mockito.when(mockTracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return mockTracer;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
if (errorType == null) {
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Matchers.anyString(), Mockito.eq(context));
} else {
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Mockito.eq(errorType), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
}
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
we can do readUDF -> readUserDefinedFunction ?, but cant do just read() as we have hierarchy model in cosmos and all resources use read() api | public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
} | verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context, | public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private List<Tracer> getMockTracer(Tracer tracer) {
List<Tracer> tracerList = new ArrayList<>();
tracerList.add(tracer);
Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return tracerList;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
    @Test(groups = {"emulator"}, timeOut = TIMEOUT)
    public void cosmosAsyncDatabase() {
        // Verifies that each database-level operation starts exactly one tracing span,
        // including the error-attribute path when readThroughput throws.
        Tracer mockTracer = getMockTracer();
        TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
        ReflectionUtils.setTracerProvider(client, tracerProvider);
        TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
        // Let the real startSpan run but capture the Context it returns for later checks.
        Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        // Expected cumulative number of startSpan invocations after each operation.
        int traceApiCounter = 1;
        cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
            "/pk", 5000).block();
        Context context = tracerProviderCapture.getResult();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        cosmosAsyncDatabase.readAllUsers().byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        cosmosAsyncDatabase.readAllContainers().byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        // readThroughput may fail; capture the exception type so the span's error
        // attributes can be verified.
        String errorType = null;
        try {
            cosmosAsyncDatabase.readThroughput().block();
        } catch (CosmosException ex) {
            errorType = ex.getClass().getName();
        }
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
    }
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
    @AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
    public void afterClass() {
        // Release the shared async client created in beforeClass; closeQuietly suppresses
        // any exception thrown during shutdown.
        LifeCycleUtils.closeQuietly(client);
    }
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private Tracer getMockTracer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
Mockito.when(mockTracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return mockTracer;
}
    /**
     * Verifies the span attributes set on the mock tracer for one traced operation.
     *
     * @param mockTracer the tracer mock to verify against
     * @param methodName expected DB_STATEMENT attribute value for this operation
     * @param context the Context captured from startSpan
     * @param databaseName expected DB_INSTANCE value, or null for cross-database calls
     * @param numberOfTimesCalledWithinTest cumulative count of traced operations so far
     * @param errorType expected exception class name, or null if no error is expected
     */
    private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
                                        int numberOfTimesCalledWithinTest, String errorType) {
        if (databaseName != null) {
            Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
                databaseName, context);
        }
        // DB_TYPE and DB_URL are set once per traced call, so their counts accumulate.
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
            TracerProvider.DB_TYPE_VALUE, context);
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
            TestConfigurations.HOST,
            context);
        // The statement attribute is unique per method name, so it is seen exactly once.
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
        if (errorType == null) {
            // No failure expected: error attributes must never have been set.
            Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
                , Matchers.anyString(), Mockito.eq(context));
            Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
                , Matchers.anyString(), Mockito.eq(context));
        } else {
            // Failure expected: the error type and a message must be recorded exactly once.
            Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
                , Mockito.eq(errorType), Mockito.eq(context));
            Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
                , Matchers.anyString(), Mockito.eq(context));
        }
    }
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
That is an incorrect representation of the use case. As a unit test, it should ideally expect exactly one span per API call. | public void cosmosAsyncContainer() {
        // Body of a container-tracing test: each operation below must start exactly one
        // additional span (hard-coded expected counts 1..8) with the expected attributes.
        Tracer mockTracer = Mockito.mock(Tracer.class);
        TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
        ReflectionUtils.setTracerProvider(client, tracerProvider);
        TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
        // Let the real startSpan run but capture the Context it returns for later checks.
        Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        cosmosAsyncContainer.read().block();
        Context context = tracerProviderCapture.getResult();
        Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 1);
        try {
            cosmosAsyncContainer.readThroughput().block();
        } catch (CosmosException ex) {
            // NOTE(review): intentionally swallowed — only the emitted span is verified.
        }
        Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 2);
        InternalObjectNode item = new InternalObjectNode();
        item.setId(ITEM_ID);
        cosmosAsyncContainer.createItem(item).block();
        Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 3);
        cosmosAsyncContainer.upsertItem(item,
            new CosmosItemRequestOptions()).block();
        Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 4);
        cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
            InternalObjectNode.class).block();
        Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 5);
        cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
        Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 6);
        cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 7);
        String query = "select * from c where c.id = '" + ITEM_ID + "'";
        cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 8);
    }
        // Body of a container-tracing test using an incrementing expected-count variable
        // instead of hard-coded counts; each operation must add exactly one span.
        Tracer mockTracer = getMockTracer();
        TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
        ReflectionUtils.setTracerProvider(client, tracerProvider);
        TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
        // Let the real startSpan run but capture the Context it returns for later checks.
        Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        int traceApiCounter = 1;
        cosmosAsyncContainer.read().block();
        Context context = tracerProviderCapture.getResult();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        try {
            cosmosAsyncContainer.readThroughput().block();
        } catch (CosmosException ex) {
            // NOTE(review): intentionally swallowed — only the emitted span is verified.
        }
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        InternalObjectNode item = new InternalObjectNode();
        item.setId(ITEM_ID);
        cosmosAsyncContainer.createItem(item).block();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        cosmosAsyncContainer.upsertItem(item,
            new CosmosItemRequestOptions()).block();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        // NOTE(review): 'node' is never used afterwards — candidate for removal.
        InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
            InternalObjectNode.class).block().getItem();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        traceApiCounter++;
        String query = "select * from c where c.id = '" + ITEM_ID + "'";
        cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), traceApiCounter, null);
        // NOTE(review): this final increment is dead code — the counter is not read again.
        traceApiCounter++;
    }
    // Id of the document created/read/deleted by the item-level tests.
    private static final String ITEM_ID = "tracerDoc";
    // Shared client and test resources, initialized once in beforeClass.
    CosmosAsyncClient client;
    CosmosAsyncDatabase cosmosAsyncDatabase;
    CosmosAsyncContainer cosmosAsyncContainer;
    @BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
    public void beforeClass() {
        // Build a direct-mode async client against the emulator and grab the shared
        // database/container fixtures provided by TestSuiteBase.
        client = new CosmosClientBuilder()
            .endpoint(TestConfigurations.HOST)
            .key(TestConfigurations.MASTER_KEY)
            .directMode(DirectConnectionConfig.getDefaultConfig())
            .buildAsyncClient();
        cosmosAsyncDatabase = getSharedCosmosDatabase(client);
        cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
    }
    @Test(groups = {"emulator"}, timeOut = TIMEOUT)
    public void cosmosAsyncClient() {
        // Verifies that each client-level operation starts exactly one tracing span
        // (hard-coded expected counts 1..3) with the expected attributes.
        Tracer mockTracer = Mockito.mock(Tracer.class);
        TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
        ReflectionUtils.setTracerProvider(client, tracerProvider);
        TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
        // Let the real startSpan run but capture the Context it returns for later checks.
        Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
        Context context = tracerProviderCapture.getResult();
        Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
            cosmosAsyncDatabase.getId(), 1);
        client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        // Cross-database operations carry no database name, hence the null databaseName.
        verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
        String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
        client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
    }
    @Test(groups = {"emulator"}, timeOut = TIMEOUT)
    public void cosmosAsyncDatabase() {
        // Verifies that each database-level operation starts exactly one tracing span
        // (hard-coded expected counts 1..4) with the expected attributes.
        Tracer mockTracer = Mockito.mock(Tracer.class);
        TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
        ReflectionUtils.setTracerProvider(client, tracerProvider);
        TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
        // Let the real startSpan run but capture the Context it returns for later checks.
        Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
            Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
            "/pk", 5000).block();
        Context context = tracerProviderCapture.getResult();
        Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
            cosmosAsyncDatabase.getId(), 1);
        try {
            cosmosAsyncDatabase.readThroughput().block();
        } catch (CosmosException ex) {
            // NOTE(review): intentionally swallowed — only the emitted span is verified.
        }
        Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
            cosmosAsyncDatabase.getId(), 2);
        cosmosAsyncDatabase.readAllUsers().byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
            cosmosAsyncDatabase.getId(), 3);
        cosmosAsyncDatabase.readAllContainers().byPage().single().block();
        Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
            Matchers.anyString(), Matchers.any(Context.class));
        verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
            cosmosAsyncDatabase.getId(), 4);
    }
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
}
    @AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
    public void afterClass() {
        // Release the shared async client created in beforeClass; closeQuietly suppresses
        // any exception thrown during shutdown.
        LifeCycleUtils.closeQuietly(client);
    }
    /** Builds a UDF definition with a random id and a fixed trivial body. */
    private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
        CosmosUserDefinedFunctionProperties udf =
            new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
        return udf;
    }
    /** Builds a pre-trigger definition for CREATE operations, with a random id. */
    private static CosmosTriggerProperties getCosmosTriggerProperties() {
        CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
            "x = 10;}");
        trigger.setTriggerOperation(TriggerOperation.CREATE);
        trigger.setTriggerType(TriggerType.PRE);
        return trigger;
    }
    /** Builds a stored-procedure definition with a random id and a fixed trivial body. */
    private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
        CosmosStoredProcedureProperties storedProcedureDef =
            new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
        return storedProcedureDef;
    }
private List<Tracer> getMockTracer(Tracer tracer) {
List<Tracer> tracerList = new ArrayList<>();
tracerList.add(tracer);
Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return tracerList;
}
    /**
     * Verifies the span attributes set on the mock tracer for one traced operation.
     *
     * @param mockTracer the tracer mock to verify against
     * @param methodName expected DB_STATEMENT attribute value for this operation
     * @param context the Context captured from startSpan
     * @param databaseName expected DB_INSTANCE value, or null for cross-database calls
     * @param numberOfTimesCalledWithinTest cumulative count of traced operations so far
     */
    private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
                                        int numberOfTimesCalledWithinTest) {
        if (databaseName != null) {
            Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
                databaseName, context);
        }
        // DB_TYPE and DB_URL are set once per traced call, so their counts accumulate.
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
            TracerProvider.DB_TYPE_VALUE, context);
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
            TestConfigurations.HOST,
            context);
        // The statement attribute is unique per method name, so it is seen exactly once.
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
    }
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private Tracer getMockTracer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
Mockito.when(mockTracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return mockTracer;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
if (errorType == null) {
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Matchers.anyString(), Mockito.eq(context));
} else {
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Mockito.eq(errorType), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
}
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
@simplynaveen20 - we should an incrementing counter instead of using numeral literal values in `Mockito.times(1)` call - so that it is much clear to us. | public void cosmosAsyncDatabase() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
} | Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(), | public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private List<Tracer> getMockTracer(Tracer tracer) {
List<Tracer> tracerList = new ArrayList<>();
tracerList.add(tracer);
Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return tracerList;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
CosmosUserDefinedFunctionProperties udf =
new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return udf;
}
private static CosmosTriggerProperties getCosmosTriggerProperties() {
CosmosTriggerProperties trigger = new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var " +
"x = 10;}");
trigger.setTriggerOperation(TriggerOperation.CREATE);
trigger.setTriggerType(TriggerType.PRE);
return trigger;
}
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef =
new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
return storedProcedureDef;
}
private Tracer getMockTracer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
Mockito.when(mockTracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
return mockTracer;
}
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
if (errorType == null) {
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Matchers.anyString(), Mockito.eq(context));
} else {
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Mockito.eq(errorType), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
}
}
private class TracerProviderCapture implements Answer<Context> {
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
we need spy to get the context from startSpan to verify attributes , and also we are checking on number of timer startSpan is called on provider | public void cosmosAsyncDatabase() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
} | TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer))); | public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
} | class CosmosTracerTest extends TestSuiteBase {
// Well-known document id used by the item CRUD tracing assertions in this suite.
private static final String ITEM_ID = "tracerDoc";
// Shared fixtures, initialized once in beforeClass() and reused by every test.
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
    // Build a direct-mode async client against the configured test account.
    CosmosClientBuilder clientBuilder = new CosmosClientBuilder()
        .endpoint(TestConfigurations.HOST)
        .key(TestConfigurations.MASTER_KEY)
        .directMode(DirectConnectionConfig.getDefaultConfig());
    client = clientBuilder.buildAsyncClient();
    // Reuse the suite-wide shared database/container fixtures.
    cosmosAsyncDatabase = getSharedCosmosDatabase(client);
    cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
/**
 * Verifies that client-level operations (create/read/query databases) each start
 * exactly one tracing span and tag it with the expected DB attributes.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
// Spy on the TracerProvider so startSpan invocations can be counted; the mock
// Tracer records the attributes set on each span.
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
// Capture the Context returned by the real startSpan so attribute verification
// can match against the exact context instance.
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// 1st traced call: createDatabaseIfNotExists.
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// 2nd traced call: readAllDatabases (null database name: no DB_INSTANCE check).
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
// 3rd traced call: queryDatabases.
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
/**
 * Verifies span creation and attributes for container-level operations:
 * read container, read throughput, and the item CRUD/query surface.
 * Each Cosmos call should bump the startSpan invocation count by exactly one.
 */
// NOTE(review): the @Test annotation appears twice below — looks like a
// merge/extraction artifact; confirm and remove one copy.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
// Capture the Context the first startSpan returns; later attribute checks use it.
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Call 1: read container metadata.
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class))
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// Call 2: readThroughput may throw (e.g. no dedicated offer); the span must
// still have been started, so the exception is deliberately swallowed here.
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
// Calls 3-6: item create / upsert / read / delete round-trip on ITEM_ID.
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
// Calls 7-8: readAllItems and a point query.
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
}
/**
 * Verifies span creation for the scripts surface: stored procedures, triggers
 * and user-defined functions (read-all, create, read, replace, delete).
 * The hard-coded counts passed to verify(...) track the cumulative number of
 * traced calls made so far in this test.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Calls 1-3: read-all for each script type.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
// Calls 4-8: UDF lifecycle (create, read, replace, read-all, delete).
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
// NOTE(review): setBody mutates the local properties object, but the replace
// below sends resultUdf (the server-returned copy) — the new body is never
// uploaded; confirm whether replacing with the updated body was intended.
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
// Call 7: only the span count is checked here; no attribute verification.
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
// Calls 9-13: trigger lifecycle.
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
// Call 12: span count only, no attribute verification.
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
// Calls 14-18: stored-procedure lifecycle. Call 17 (read-all) is not verified
// individually; the times(18) check after delete covers both calls.
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
}
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
// Release the shared client; closeQuietly presumably suppresses close-time
// errors (per its name) — see LifeCycleUtils for the exact contract.
LifeCycleUtils.closeQuietly(client);
}
/** Builds a throwaway UDF definition with a random id and a trivial no-op body. */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/** Builds a throwaway pre-trigger (fires on CREATE) with a random id and no-op body. */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties props =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    props.setTriggerOperation(TriggerOperation.CREATE);
    props.setTriggerType(TriggerType.PRE);
    return props;
}
/** Builds a throwaway stored-procedure definition with a random id and no-op body. */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Stubs the given mock tracer so span creation is a no-op returning the root
 * context, and wraps it in a single-element mutable list for TracerProvider.
 */
private List<Tracer> getMockTracer(Tracer tracer) {
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
    List<Tracer> tracers = new ArrayList<>();
    tracers.add(tracer);
    return tracers;
}
/**
 * Asserts that the mock tracer received the expected span attributes.
 *
 * DB_INSTANCE (skipped when databaseName is null), DB_TYPE and DB_URL are set
 * once per traced call, so their cumulative count equals
 * numberOfTimesCalledWithinTest. DB_STATEMENT carries the per-operation method
 * name, which is unique within a test, hence times(1).
 */
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
// Unique statement name per operation, so exactly one occurrence is expected.
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
/**
 * Mockito Answer that delegates to the real startSpan and remembers the last
 * Context it produced, so tests can verify attributes against that instance.
 */
private class TracerProviderCapture implements Answer<Context> {
    private Context result = null;

    @Override
    public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
        // Run the real spied method, then keep its return value for later lookup.
        this.result = (Context) invocationOnMock.callRealMethod();
        return this.result;
    }

    /** Last Context returned by the spied startSpan, or null if never invoked. */
    public Context getResult() {
        return result;
    }
}
} | class CosmosTracerTest extends TestSuiteBase {
// Well-known document id used by the item CRUD tracing assertions in this suite.
private static final String ITEM_ID = "tracerDoc";
// Shared fixtures, initialized once in beforeClass() and reused by every test.
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
    // Direct (TCP) connectivity with default settings.
    DirectConnectionConfig directConfig = DirectConnectionConfig.getDefaultConfig();
    client = new CosmosClientBuilder()
        .endpoint(TestConfigurations.HOST)
        .key(TestConfigurations.MASTER_KEY)
        .directMode(directConfig)
        .buildAsyncClient();
    // Shared suite fixtures backing every test in this class.
    cosmosAsyncDatabase = getSharedCosmosDatabase(client);
    cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
/**
 * Verifies that client-level operations (create/read/query databases) each
 * start exactly one span. traceApiCounter tracks the cumulative number of
 * traced calls and is incremented after every verified operation.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
// Capture the Context returned by the real startSpan for attribute matching.
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// createDatabaseIfNotExists with explicit manual throughput.
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// readAllDatabases (null database name: no DB_INSTANCE attribute expected).
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
// queryDatabases with a point-lookup style query.
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
/**
 * Verifies span creation and attributes for container-level operations (read
 * container, read throughput, item CRUD and queries). traceApiCounter is the
 * running total of traced calls, incremented after each verified operation.
 */
// NOTE(review): the @Test annotation appears twice below — looks like a
// merge/extraction artifact; confirm and remove one copy.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
// Read container metadata.
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// readThroughput may throw (e.g. no dedicated offer); the span must still be
// started, so the exception is deliberately swallowed and errorType stays null.
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Item create / upsert / read / delete round-trip on ITEM_ID.
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// readAllItems and a point query.
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
}
/**
 * Verifies span creation for the scripts surface: stored procedures, triggers
 * and user-defined functions. traceApiCounter is the running total of traced
 * calls; it is incremented after each verified operation (and once for each
 * read-all call that is count-checked without attribute verification).
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
// Read-all for each script type.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// UDF lifecycle: create, read, replace, read-all, delete.
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// NOTE(review): setBody mutates the local properties object, but the replace
// below sends resultUdf (the server-returned copy) — the new body is never
// uploaded; confirm whether replacing with the updated body was intended.
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Count-only check for this read-all; no attribute verification.
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Trigger lifecycle.
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Count-only check for this read-all; no attribute verification.
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Stored-procedure lifecycle.
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// The read-all below is not verified individually; the single increment plus
// the times(traceApiCounter) check after delete covers both calls.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
/**
 * Verifies that a failed SDK call still emits a tracing span and that the span carries the
 * error-type attribute: a createItem succeeds (span 1), then a readItem with the wrong
 * partition key throws 404 (span 2) and the exception's class name must be recorded.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
// Spy on the provider so startSpan invocations can be counted while the real logic runs.
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
// Expected number of startSpan calls so far; incremented after each verified operation.
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
// Context captured from the real startSpan call, reused by attribute verification below.
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
// Deliberately wrong partition key: the document was created without this pk value.
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
// The span is expected to record the concrete exception class name as its error type.
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
/** Releases the shared async client once every test in this class has finished. */
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
/**
 * Builds a throwaway user-defined-function definition with a random id and a fixed
 * one-statement body.
 */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Builds a throwaway pre-CREATE trigger definition with a random id and a fixed
 * one-statement body.
 */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    String body = "function() {var " + "x = 10;}";
    CosmosTriggerProperties props = new CosmosTriggerProperties(UUID.randomUUID().toString(), body);
    props.setTriggerType(TriggerType.PRE);
    props.setTriggerOperation(TriggerOperation.CREATE);
    return props;
}
/**
 * Builds a throwaway stored-procedure definition with a random id and a fixed
 * one-statement body.
 */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Creates a Mockito mock of {@link Tracer} whose {@code start} always yields
 * {@link Context#NONE}, so span creation is a no-op while still being verifiable.
 */
private Tracer getMockTracer() {
    Tracer tracer = Mockito.mock(Tracer.class);
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class)))
        .thenReturn(Context.NONE);
    return tracer;
}
/**
 * Asserts the standard span attributes for the most recent traced operation.
 *
 * @param mockTracer                   the mock the SDK wrote attributes to
 * @param methodName                   expected DB_STATEMENT value (set exactly once per method)
 * @param context                      the context captured from the real startSpan call
 * @param databaseName                 expected DB_INSTANCE value, or null to skip that check
 * @param numberOfTimesCalledWithinTest cumulative number of traced calls so far in this test
 * @param errorType                    expected ERROR_TYPE value, or null when no failure occurred
 */
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
    if (databaseName != null) {
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest))
            .setAttribute(TracerProvider.DB_INSTANCE, databaseName, context);
    }
    // These attributes are written on every traced call, hence the cumulative count.
    Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest))
        .setAttribute(TracerProvider.DB_TYPE, TracerProvider.DB_TYPE_VALUE, context);
    Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest))
        .setAttribute(TracerProvider.DB_URL, TestConfigurations.HOST, context);
    // The statement name is unique per operation, so it is seen exactly once.
    Mockito.verify(mockTracer, Mockito.times(1))
        .setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
    if (errorType == null) {
        // No failure expected anywhere in the test so far: error attributes must be absent.
        Mockito.verify(mockTracer, Mockito.never()).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG),
            Matchers.anyString(), Mockito.eq(context));
        Mockito.verify(mockTracer, Mockito.never()).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE),
            Matchers.anyString(), Mockito.eq(context));
    } else {
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE),
            Mockito.eq(errorType), Mockito.eq(context));
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG),
            Matchers.anyString(), Mockito.eq(context));
    }
}
// Mockito Answer that lets the spied startSpan run for real while capturing the Context it
// returns, so later attribute verifications can match against the exact context the SDK used.
private class TracerProviderCapture implements Answer<Context> {
// Last Context returned by the real startSpan; null until the first traced call.
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
// Delegate to the real spied method, then remember its return value.
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} |
added incremental variable instead of numeral literal | public void cosmosAsyncDatabase() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
} | Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(), | public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
/**
 * Builds the shared direct-mode async client and resolves the shared database/container
 * fixtures used by every test in this class.
 */
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
/**
 * Verifies that each client-level operation (createDatabaseIfNotExists, readAllDatabases,
 * queryDatabases) starts exactly one tracing span with the expected attributes. The literal
 * counts passed to Mockito.times are cumulative across the test.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = Mockito.mock(Tracer.class);
// Spy wraps the real provider so startSpan can be both executed and counted.
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Cross-database operations carry no DB_INSTANCE, hence the null database name.
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
/**
 * Verifies that every item-level operation on {@link CosmosAsyncContainer} (read,
 * readThroughput, create/upsert/read/delete item, readAllItems, queryItems) starts exactly
 * one tracing span with the expected attributes. The literal counts are cumulative across
 * the test.
 *
 * NOTE(review): the original code carried the {@code @Test} annotation twice on this method;
 * TestNG's {@code @Test} is not a repeatable annotation, so the duplicate does not compile
 * and has been removed.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
    Tracer mockTracer = Mockito.mock(Tracer.class);
    // Spy wraps the real provider so startSpan can be both executed and counted.
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));

    cosmosAsyncContainer.read().block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 1);

    try {
        cosmosAsyncContainer.readThroughput().block();
    } catch (CosmosException ex) {
        // Best effort: presumably the shared container has no dedicated throughput, so this
        // may throw; the span must still be emitted either way. TODO confirm intent.
    }
    Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 2);

    InternalObjectNode item = new InternalObjectNode();
    item.setId(ITEM_ID);
    cosmosAsyncContainer.createItem(item).block();
    Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 3);

    cosmosAsyncContainer.upsertItem(item,
        new CosmosItemRequestOptions()).block();
    Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 4);

    cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
        InternalObjectNode.class).block();
    Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 5);

    cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
    Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 6);

    cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 7);

    String query = "select * from c where c.id = '" + ITEM_ID + "'";
    cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 8);
}
/**
 * Verifies tracing spans for the scripts surface: UDFs, triggers, and stored procedures
 * (readAll/create/read/replace/delete each). The literal counts passed to Mockito.times are
 * cumulative across the whole test; a few intermediate readAll* calls bump the span count
 * without a matching attribute verification (their DB_STATEMENT was already verified once
 * earlier, so times(1) would no longer hold).
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = Mockito.mock(Tracer.class);
// Spy wraps the real provider so startSpan can be both executed and counted.
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
// ---- UDF lifecycle: create (4), read (5), replace (6), readAll again (7), delete (8) ----
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
// NOTE(review): the new body is set on cosmosUserDefinedFunctionProperties, but replace()
// is invoked with resultUdf — looks unintended; confirm which object should carry the edit.
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
// ---- Trigger lifecycle: create (9), read (10), replace (11), readAll (12), delete (13) ----
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 9);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 10);
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 11);
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 13);
// ---- Sproc lifecycle: create (14), read (15), replace (16), readAll (17), delete (18) ----
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 14);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 15);
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 16);
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 18);
}
/** Releases the shared async client once every test in this class has finished. */
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
/**
 * Builds a throwaway user-defined-function definition with a random id and a fixed
 * one-statement body.
 */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Builds a throwaway pre-CREATE trigger definition with a random id and a fixed
 * one-statement body.
 */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    String body = "function() {var " + "x = 10;}";
    CosmosTriggerProperties props = new CosmosTriggerProperties(UUID.randomUUID().toString(), body);
    props.setTriggerType(TriggerType.PRE);
    props.setTriggerOperation(TriggerOperation.CREATE);
    return props;
}
/**
 * Builds a throwaway stored-procedure definition with a random id and a fixed
 * one-statement body.
 */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(
        UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Stubs {@code start} on the supplied mock tracer to return {@link Context#NONE} and wraps
 * it in a single-element list, matching the {@link TracerProvider} constructor shape.
 */
private List<Tracer> getMockTracer(Tracer tracer) {
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class)))
        .thenReturn(Context.NONE);
    List<Tracer> wrapped = new ArrayList<>();
    wrapped.add(tracer);
    return wrapped;
}
/**
 * Asserts the standard span attributes for the most recent traced operation.
 *
 * @param mockTracer                   the mock the SDK wrote attributes to
 * @param methodName                   expected DB_STATEMENT value (set exactly once per method)
 * @param context                      the context captured from the real startSpan call
 * @param databaseName                 expected DB_INSTANCE value, or null to skip that check
 * @param numberOfTimesCalledWithinTest cumulative number of traced calls so far in this test
 */
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
// These attributes are written on every traced call, hence the cumulative count.
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
// The statement name is unique per operation, so it is seen exactly once.
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
// Mockito Answer that lets the spied startSpan run for real while capturing the Context it
// returns, so later attribute verifications can match against the exact context the SDK used.
private class TracerProviderCapture implements Answer<Context> {
// Last Context returned by the real startSpan; null until the first traced call.
private Context result = null;
public Context getResult() {
return result;
}
@Override
public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
// Delegate to the real spied method, then remember its return value.
result = (Context) invocationOnMock.callRealMethod();
return result;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
/**
 * Builds the shared direct-mode async client and resolves the shared database/container
 * fixtures used by every test in this class.
 */
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
/**
 * Verifies that each client-level operation (createDatabaseIfNotExists, readAllDatabases,
 * queryDatabases) starts exactly one tracing span with the expected attributes.
 * traceApiCounter tracks the cumulative number of spans expected so far.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
// Spy wraps the real provider so startSpan can be both executed and counted.
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Cross-database operations carry no DB_INSTANCE, hence the null database name.
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
/**
 * Verifies that every item-level operation on {@link CosmosAsyncContainer} (read,
 * readThroughput, create/upsert/read/delete item, readAllItems, queryItems) starts exactly
 * one tracing span with the expected attributes. traceApiCounter tracks the cumulative
 * number of spans expected so far.
 *
 * NOTE(review): the original code carried the {@code @Test} annotation twice on this method;
 * TestNG's {@code @Test} is not a repeatable annotation, so the duplicate does not compile
 * and has been removed. An unused local ({@code node}) and a dead trailing counter increment
 * were dropped as well.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncContainer() {
    Tracer mockTracer = getMockTracer();
    // Spy wraps the real provider so startSpan can be both executed and counted.
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    int traceApiCounter = 1;

    cosmosAsyncContainer.read().block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    try {
        cosmosAsyncContainer.readThroughput().block();
    } catch (CosmosException ex) {
        // Best effort: presumably the shared container has no dedicated throughput, so this
        // may throw; the span must still be emitted either way. TODO confirm intent.
    }
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    InternalObjectNode item = new InternalObjectNode();
    item.setId(ITEM_ID);
    cosmosAsyncContainer.createItem(item).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    cosmosAsyncContainer.upsertItem(item,
        new CosmosItemRequestOptions()).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
        InternalObjectNode.class).block().getItem();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
    traceApiCounter++;

    String query = "select * from c where c.id = '" + ITEM_ID + "'";
    cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
// Verifies that each scripts API (stored procedures, triggers, UDFs) starts exactly one
// tracing span per SDK call and records the expected span attributes.
// traceApiCounter tracks the expected cumulative number of startSpan invocations on the
// spied provider; it is incremented once per traced SDK call.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
// Wire a spied TracerProvider into the client and capture the Context returned by startSpan.
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
// --- stored procedure / trigger / UDF listings ---
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// --- UDF create / read / replace / list / delete lifecycle ---
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// NOTE(review): the body is mutated on the original properties object, but the unchanged
// resultUdf is what gets replaced — confirm this is intentional.
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Repeated listing: only the span count is checked here, not the attributes.
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// --- trigger create / read / replace / list / delete lifecycle ---
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Repeated listing: only the span count is checked here, not the attributes.
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// --- stored procedure create / read / replace / list / delete lifecycle ---
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// The readAll span below is counted (via the two increments around it) but its
// attributes are not verified; only the delete's attributes are checked.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
/**
 * Verifies that a failed SDK call records error attributes (type and message) on its
 * tracing span, while a successful call records none.
 */
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
    // Install a spied TracerProvider and capture the Context that startSpan returns.
    Tracer mockTracer = getMockTracer();
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    int expectedSpanCount = 1;
    TracerProviderCapture spanContextCapture = new TracerProviderCapture();
    Mockito.doAnswer(spanContextCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(), Matchers.anyString(), Matchers.any(Context.class));

    // Successful call: one span is started and it carries no error attributes.
    InternalObjectNode doc = new InternalObjectNode();
    doc.setId("testDoc");
    cosmosAsyncContainer.createItem(doc).block();
    Context capturedContext = spanContextCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(expectedSpanCount)).startSpan(Matchers.anyString(),
        Matchers.anyString(), Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), capturedContext,
        cosmosAsyncDatabase.getId(), expectedSpanCount, null);
    expectedSpanCount++;

    // Failing call: reading with the wrong partition key must surface a 404, and the span
    // must record the exception's class name as the error type.
    String observedErrorType = null;
    try {
        cosmosAsyncContainer.readItem("testDoc", new PartitionKey("wrongPk"), null,
            InternalObjectNode.class).block();
        fail("readItem should fail due to wrong pk");
    } catch (CosmosException ex) {
        assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
        observedErrorType = ex.getClass().getName();
    }
    Mockito.verify(tracerProvider, Mockito.times(expectedSpanCount)).startSpan(Matchers.anyString(),
        Matchers.anyString(), Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), capturedContext,
        cosmosAsyncDatabase.getId(), expectedSpanCount, observedErrorType);
}
// Releases the shared async client after all tests in this class have run;
// closeQuietly suppresses any exception thrown during shutdown.
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
LifeCycleUtils.closeQuietly(client);
}
/** Builds a UDF definition with a random id and a fixed trivial body. */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
    return new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/** Builds a pre-create trigger definition with a random id and a fixed trivial body. */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
    CosmosTriggerProperties triggerDefinition =
        new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
    triggerDefinition.setTriggerType(TriggerType.PRE);
    triggerDefinition.setTriggerOperation(TriggerOperation.CREATE);
    return triggerDefinition;
}
/** Builds a stored procedure definition with a random id and a fixed trivial body. */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
    return new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Creates a Mockito mock of {@link Tracer} whose {@code start} method always
 * returns {@link Context#NONE}, so spans can be "started" without a real tracer.
 */
private Tracer getMockTracer() {
    Tracer tracer = Mockito.mock(Tracer.class);
    Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class))).thenReturn(Context.NONE);
    return tracer;
}
/**
 * Asserts the span attributes recorded on the mock tracer for a traced SDK call.
 *
 * @param mockTracer the mocked tracer to verify against
 * @param methodName expected DB_STATEMENT value (operation name, unique per call site)
 * @param context the span context captured from startSpan
 * @param databaseName expected DB_INSTANCE value, or null to skip that check
 * @param numberOfTimesCalledWithinTest cumulative expected count of attribute writes so far
 * @param errorType expected error class name for a failed call, or null for a successful one
 */
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
    int numberOfTimesCalledWithinTest, String errorType) {
    // Common attributes are written once per traced call, so their count accumulates.
    Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest))
        .setAttribute(TracerProvider.DB_TYPE, TracerProvider.DB_TYPE_VALUE, context);
    Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest))
        .setAttribute(TracerProvider.DB_URL, TestConfigurations.HOST, context);
    if (databaseName != null) {
        Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest))
            .setAttribute(TracerProvider.DB_INSTANCE, databaseName, context);
    }
    // The statement attribute carries the operation name, so each one is seen exactly once.
    Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
    if (errorType != null) {
        // A failed call records both the error type and a message exactly once.
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE),
            Mockito.eq(errorType), Mockito.eq(context));
        Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG),
            Matchers.anyString(), Mockito.eq(context));
    } else {
        // A successful call must record no error attributes at all.
        Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE),
            Matchers.anyString(), Mockito.eq(context));
        Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG),
            Matchers.anyString(), Mockito.eq(context));
    }
}
/**
 * Mockito {@link Answer} that delegates to the real (spied) startSpan implementation
 * and remembers the {@link Context} it produced, so tests can verify span attributes
 * against that exact context instance.
 */
private class TracerProviderCapture implements Answer<Context> {
    private Context result = null;

    @Override
    public Context answer(InvocationOnMock invocationOnMock) throws Throwable {
        // Invoke the real method on the spy and keep its return value.
        result = (Context) invocationOnMock.callRealMethod();
        return result;
    }

    /** Returns the context captured by the most recent startSpan call, or null if none. */
    public Context getResult() {
        return result;
    }
}
} |
We are checking incremental invocations based on API calls within a single test case, grouped together according to our object model — e.g., all container APIs are covered in one test. | public void cosmosAsyncContainer() {
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 2);
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 4);
cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block();
Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 5);
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 6);
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 7);
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 8);
} | Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(), | public void cosmosAsyncContainer() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.read().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readContainer." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
try {
cosmosAsyncContainer.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode item = new InternalObjectNode();
item.setId(ITEM_ID);
cosmosAsyncContainer.createItem(item).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.upsertItem(item,
new CosmosItemRequestOptions()).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "upsertItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
InternalObjectNode node = cosmosAsyncContainer.readItem(ITEM_ID, PartitionKey.NONE,
InternalObjectNode.class).block().getItem();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.deleteItem(ITEM_ID, PartitionKey.NONE).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.readAllItems(new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + ITEM_ID + "'";
cosmosAsyncContainer.queryItems(query, new CosmosQueryRequestOptions(), CosmosItemRequestOptions.class).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryItems." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
} | class CosmosTracerTest extends TestSuiteBase {
// Id of the document created/read/deleted by the item-level trace tests.
private static final String ITEM_ID = "tracerDoc";
// Client and shared database/container handles, initialized in beforeClass().
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
/**
 * Builds the direct-mode async client against the test endpoint and resolves the
 * shared database and multi-partition container used by all tests in this class.
 */
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
    CosmosClientBuilder clientBuilder = new CosmosClientBuilder()
        .endpoint(TestConfigurations.HOST)
        .key(TestConfigurations.MASTER_KEY)
        .directMode(DirectConnectionConfig.getDefaultConfig());
    client = clientBuilder.buildAsyncClient();
    cosmosAsyncDatabase = getSharedCosmosDatabase(client);
    cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
// Verifies that client-level APIs (create/list/query databases) each start exactly one
// tracing span and record the expected attributes. The hard-coded counts (1, 2, 3) are
// the expected cumulative number of startSpan invocations after each call.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
// Wire a spied TracerProvider into the client and capture the Context returned by startSpan.
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId()).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// Cross-database operations pass null for the database name, skipping the DB_INSTANCE check.
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, 2);
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, 3);
}
// Verifies that database-level APIs (create container, read throughput, list users and
// containers) each start exactly one tracing span with the expected attributes. The
// hard-coded counts (1..4) are the expected cumulative startSpan invocations.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
// Wire a spied TracerProvider into the client and capture the Context returned by startSpan.
Tracer mockTracer = Mockito.mock(Tracer.class);
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), 1);
// readThroughput may fail (e.g. no dedicated throughput); the span must still be started,
// so the exception is intentionally swallowed here.
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
}
Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 2);
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 3);
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), 4);
}
// FIX: the method was annotated with two consecutive @Test annotations; @Test is not a
// repeatable annotation, so the duplicate is a compile error. One annotation is kept.
//
// Verifies that each scripts API (stored procedures, triggers, UDFs) starts exactly one
// tracing span per SDK call and records the expected attributes. The hard-coded counts
// (1..18) are the expected cumulative number of startSpan invocations after each call.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
    // Wire a spied TracerProvider into the client and capture the Context returned by startSpan.
    Tracer mockTracer = Mockito.mock(Tracer.class);
    TracerProvider tracerProvider = Mockito.spy(new TracerProvider(getMockTracer(mockTracer)));
    ReflectionUtils.setTracerProvider(client, tracerProvider);
    TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
    Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
        Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    // --- listings ---
    cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
    Context context = tracerProviderCapture.getResult();
    Mockito.verify(tracerProvider, Mockito.times(1)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 1);
    cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(2)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 2);
    cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(3)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 3);
    // --- UDF lifecycle ---
    CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
        getCosmosUserDefinedFunctionProperties();
    CosmosUserDefinedFunctionProperties resultUdf =
        cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(4)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 4);
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(5)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readUDF." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 5);
    // NOTE(review): the body is mutated on the original properties object, but the
    // unchanged resultUdf is what gets replaced — confirm this is intentional.
    cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
    Mockito.verify(tracerProvider, Mockito.times(6)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceUDF." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 6);
    // Repeated listing: only the span count (7) is checked here, not the attributes.
    cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(7)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
    Mockito.verify(tracerProvider, Mockito.times(8)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteUDF." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 8);
    // --- trigger lifecycle ---
    CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
    CosmosTriggerProperties resultTrigger =
        cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(9)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 9);
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(10)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 10);
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
    Mockito.verify(tracerProvider, Mockito.times(11)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 11);
    // Repeated listing: only the span count (12) is checked here, not the attributes.
    cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
    Mockito.verify(tracerProvider, Mockito.times(12)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
    Mockito.verify(tracerProvider, Mockito.times(13)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 13);
    // --- stored procedure lifecycle ---
    CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
    CosmosStoredProcedureProperties resultSproc =
        cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
    Mockito.verify(tracerProvider, Mockito.times(14)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 14);
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
    Mockito.verify(tracerProvider, Mockito.times(15)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 15);
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
    Mockito.verify(tracerProvider, Mockito.times(16)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 16);
    // The readAll span (17) is counted but not separately verified; times(18) below
    // covers both the listing and the delete.
    cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
    cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
    Mockito.verify(tracerProvider, Mockito.times(18)).startSpan(Matchers.anyString(), Matchers.anyString(),
        Matchers.anyString(), Matchers.any(Context.class));
    verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
        cosmosAsyncDatabase.getId(), 18);
}
// Teardown: release the shared async client once every test in the class has run.
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
// closeQuietly swallows close-time failures so teardown never masks a test result.
LifeCycleUtils.closeQuietly(client);
}
/**
 * Builds a UDF definition with a random id and a fixed stub body.
 * The body content is irrelevant to these tests; only the tracing calls matter.
 */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
// Return directly; the intermediate local variable added nothing.
return new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Builds a pre-create trigger definition with a random id and a fixed stub body.
 * The body content is irrelevant to these tests; only the tracing calls matter.
 */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
final CosmosTriggerProperties triggerDefinition =
    new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
triggerDefinition.setTriggerType(TriggerType.PRE);
triggerDefinition.setTriggerOperation(TriggerOperation.CREATE);
return triggerDefinition;
}
/**
 * Builds a stored-procedure definition with a random id and a fixed stub body.
 * The body content is irrelevant to these tests; only the tracing calls matter.
 */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
// Return directly; the intermediate local variable added nothing.
return new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Stubs the given tracer so every start(...) call yields Context.NONE, then
 * wraps it in a mutable single-element list for callers that expect a list.
 */
private List<Tracer> getMockTracer(Tracer tracer) {
Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class)))
    .thenReturn(Context.NONE);
final List<Tracer> tracers = new ArrayList<>();
tracers.add(tracer);
return tracers;
}
// Asserts the tracer span attributes recorded for one traced API call.
// DB_INSTANCE/DB_TYPE/DB_URL are set once per span, so their cumulative count must
// equal the number of traced calls so far; DB_STATEMENT carries the method name and
// is unique per call, hence verified exactly once.
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest) {
// databaseName is null for account-level operations that have no DB_INSTANCE attribute.
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
}
/**
 * Mockito Answer that delegates to the real startSpan(...) and remembers the
 * Context it produced, so tests can verify span attributes against it.
 */
private class TracerProviderCapture implements Answer<Context> {
private Context capturedContext = null;

public Context getResult() {
return capturedContext;
}

@Override
public Context answer(InvocationOnMock invocation) throws Throwable {
capturedContext = (Context) invocation.callRealMethod();
return capturedContext;
}
}
} | class CosmosTracerTest extends TestSuiteBase {
private static final String ITEM_ID = "tracerDoc";
CosmosAsyncClient client;
CosmosAsyncDatabase cosmosAsyncDatabase;
CosmosAsyncContainer cosmosAsyncContainer;
// Setup: build a direct-mode async client against the emulator endpoint configured in
// TestConfigurations and cache the shared database/container fixtures from TestSuiteBase.
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.directMode(DirectConnectionConfig.getDefaultConfig())
.buildAsyncClient();
// Shared fixtures are reused across tests in this class, not recreated per test.
cosmosAsyncDatabase = getSharedCosmosDatabase(client);
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(client);
}
// Verifies that client-level operations (createDatabaseIfNotExists, readAllDatabases,
// queryDatabases) each open exactly one tracing span with the expected attributes.
// traceApiCounter tracks the cumulative number of startSpan calls expected so far.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncClient() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
// Injects the spied provider into the already-built client via reflection.
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
// Capture the Context produced by the real startSpan so attributes can be matched on it.
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
client.createDatabaseIfNotExists(cosmosAsyncDatabase.getId(), ThroughputProperties.createManualThroughput(5000)).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createDatabaseIfNotExists." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
client.readAllDatabases(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
// Account-level read: no database name, hence null DB_INSTANCE expectation.
verifyTracerAttributes(mockTracer, "readAllDatabases", context, null, traceApiCounter, null);
traceApiCounter++;
String query = "select * from c where c.id = '" + cosmosAsyncDatabase.getId() + "'";
client.queryDatabases(query, new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "queryDatabases", context, null, traceApiCounter, null);
}
// Verifies tracing spans for database-level operations, including the error path:
// readThroughput is expected to fail here (shared database has no dedicated offer —
// NOTE(review): inferred from the catch; confirm against emulator behavior), and the
// span must then carry ERROR_TYPE/ERROR_MSG attributes.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncDatabase() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncDatabase.createContainerIfNotExists(cosmosAsyncContainer.getId(),
"/pk", 5000).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createContainerIfNotExists." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllUsers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUsers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncDatabase.readAllContainers().byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllContainers." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Error path: record the failure's class name so the span's ERROR_TYPE can be verified.
String errorType = null;
try {
cosmosAsyncDatabase.readThroughput().block();
} catch (CosmosException ex) {
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readThroughput." + cosmosAsyncDatabase.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, errorType);
}
/**
 * Verifies tracing spans for every scripts (UDF / trigger / stored procedure) operation.
 * traceApiCounter tracks the cumulative number of startSpan invocations expected so far;
 * each traced SDK call bumps it by one even when the span attributes are not re-verified.
 */
// Fix: the @Test annotation was duplicated on consecutive lines; @Test is not a
// repeatable annotation, so the duplicate is a compile error and has been removed.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void cosmosAsyncScripts() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
// Capture the Context produced by the real startSpan so attributes can be matched on it.
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
int traceApiCounter = 1;
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllStoredProcedures." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllTriggers." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readAllUserDefinedFunctions." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// ---- user-defined function lifecycle: create / read / replace / list / delete ----
CosmosUserDefinedFunctionProperties cosmosUserDefinedFunctionProperties =
getCosmosUserDefinedFunctionProperties();
CosmosUserDefinedFunctionProperties resultUdf =
cosmosAsyncContainer.getScripts().createUserDefinedFunction(cosmosUserDefinedFunctionProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosUserDefinedFunctionProperties.setBody("function() {var x = 15;}");
cosmosAsyncContainer.getScripts().getUserDefinedFunction(resultUdf.getId()).replace(resultUdf).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// This listing only bumps the span count; its attributes were verified above.
cosmosAsyncContainer.getScripts().readAllUserDefinedFunctions(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getUserDefinedFunction(cosmosUserDefinedFunctionProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteUserDefinedFunction." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// ---- trigger lifecycle: create / read / replace / list / delete ----
CosmosTriggerProperties cosmosTriggerProperties = getCosmosTriggerProperties();
CosmosTriggerProperties resultTrigger =
cosmosAsyncContainer.getScripts().createTrigger(cosmosTriggerProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).replace(resultTrigger).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().readAllTriggers(new CosmosQueryRequestOptions()).byPage().single().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
traceApiCounter++;
cosmosAsyncContainer.getScripts().getTrigger(cosmosTriggerProperties.getId()).delete().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteTrigger." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// ---- stored procedure lifecycle: create / read / replace / list / delete ----
CosmosStoredProcedureProperties procedureProperties = getCosmosStoredProcedureProperties();
CosmosStoredProcedureProperties resultSproc =
cosmosAsyncContainer.getScripts().createStoredProcedure(procedureProperties).block().getProperties();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).read().block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).replace(resultSproc).block();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "replaceStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Two spans (listing + delete) with a single counter bump: the verify below checks
// the cumulative total after both, which equals traceApiCounter + 1 pre-increment.
cosmosAsyncContainer.getScripts().readAllStoredProcedures(new CosmosQueryRequestOptions()).byPage().single().block();
cosmosAsyncContainer.getScripts().getStoredProcedure(procedureProperties.getId()).delete().block();
traceApiCounter++;
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "deleteStoredProcedure." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
}
// Verifies that a failing SDK call (readItem with a wrong partition key → 404) still
// produces a span, and that the span carries the ERROR_TYPE/ERROR_MSG attributes.
@Test(groups = {"emulator"}, timeOut = TIMEOUT)
public void tracerExceptionSpan() {
Tracer mockTracer = getMockTracer();
TracerProvider tracerProvider = Mockito.spy(new TracerProvider(mockTracer));
ReflectionUtils.setTracerProvider(client, tracerProvider);
int traceApiCounter = 1;
TracerProviderCapture tracerProviderCapture = new TracerProviderCapture();
Mockito.doAnswer(tracerProviderCapture).when(tracerProvider).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
InternalObjectNode item = new InternalObjectNode();
item.setId("testDoc");
cosmosAsyncContainer.createItem(item).block();
Context context = tracerProviderCapture.getResult();
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "createItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter, null);
traceApiCounter++;
// Error path: the wrong partition key makes the point-read miss, yielding a 404.
String errorType = null;
try {
PartitionKey partitionKey = new PartitionKey("wrongPk");
cosmosAsyncContainer.readItem("testDoc", partitionKey, null, InternalObjectNode.class).block();
fail("readItem should fail due to wrong pk");
} catch (CosmosException ex) {
assertThat(ex.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
errorType = ex.getClass().getName();
}
Mockito.verify(tracerProvider, Mockito.times(traceApiCounter)).startSpan(Matchers.anyString(),
Matchers.anyString(),
Matchers.anyString(), Matchers.any(Context.class));
verifyTracerAttributes(mockTracer, "readItem." + cosmosAsyncContainer.getId(), context,
cosmosAsyncDatabase.getId(), traceApiCounter
, errorType);
}
// Teardown: release the shared async client once every test in the class has run.
@AfterClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void afterClass() {
// closeQuietly swallows close-time failures so teardown never masks a test result.
LifeCycleUtils.closeQuietly(client);
}
/**
 * Builds a UDF definition with a random id and a fixed stub body.
 * The body content is irrelevant to these tests; only the tracing calls matter.
 */
private static CosmosUserDefinedFunctionProperties getCosmosUserDefinedFunctionProperties() {
// Return directly; the intermediate local variable added nothing.
return new CosmosUserDefinedFunctionProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/**
 * Builds a pre-create trigger definition with a random id and a fixed stub body.
 * The body content is irrelevant to these tests; only the tracing calls matter.
 */
private static CosmosTriggerProperties getCosmosTriggerProperties() {
final CosmosTriggerProperties triggerDefinition =
    new CosmosTriggerProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
triggerDefinition.setTriggerType(TriggerType.PRE);
triggerDefinition.setTriggerOperation(TriggerOperation.CREATE);
return triggerDefinition;
}
/**
 * Builds a stored-procedure definition with a random id and a fixed stub body.
 * The body content is irrelevant to these tests; only the tracing calls matter.
 */
private static CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
// Return directly; the intermediate local variable added nothing.
return new CosmosStoredProcedureProperties(UUID.randomUUID().toString(), "function() {var x = 10;}");
}
/** Builds a Mockito Tracer whose start(...) stub always yields Context.NONE. */
private Tracer getMockTracer() {
final Tracer tracer = Mockito.mock(Tracer.class);
Mockito.when(tracer.start(Matchers.anyString(), Matchers.any(Context.class)))
    .thenReturn(Context.NONE);
return tracer;
}
// Asserts the tracer span attributes recorded for one traced API call.
// DB_INSTANCE/DB_TYPE/DB_URL accumulate one occurrence per span, so their expected
// count equals the number of traced calls so far; DB_STATEMENT is per-method and
// verified exactly once. When errorType is non-null the span must also carry the
// ERROR_TYPE/ERROR_MSG pair; when null, those attributes must be absent.
private void verifyTracerAttributes(Tracer mockTracer, String methodName, Context context, String databaseName,
int numberOfTimesCalledWithinTest, String errorType) {
// databaseName is null for account-level operations without a DB_INSTANCE attribute.
if (databaseName != null) {
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_INSTANCE,
databaseName, context);
}
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_TYPE,
TracerProvider.DB_TYPE_VALUE, context);
Mockito.verify(mockTracer, Mockito.times(numberOfTimesCalledWithinTest)).setAttribute(TracerProvider.DB_URL,
TestConfigurations.HOST,
context);
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(TracerProvider.DB_STATEMENT, methodName, context);
if (errorType == null) {
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(0)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Matchers.anyString(), Mockito.eq(context));
} else {
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_TYPE)
, Mockito.eq(errorType), Mockito.eq(context));
Mockito.verify(mockTracer, Mockito.times(1)).setAttribute(Mockito.eq(TracerProvider.ERROR_MSG)
, Matchers.anyString(), Mockito.eq(context));
}
}
/**
 * Mockito Answer that delegates to the real startSpan(...) and remembers the
 * Context it produced, so tests can verify span attributes against it.
 */
private class TracerProviderCapture implements Answer<Context> {
private Context capturedContext = null;

public Context getResult() {
return capturedContext;
}

@Override
public Context answer(InvocationOnMock invocation) throws Throwable {
capturedContext = (Context) invocation.callRealMethod();
return capturedContext;
}
}
} |
These shouldn't have to change. Input-output models should have setter overloads for both varargs and List. | public static AnalyzeTextOptions map(com.azure.search.documents.indexes.implementation.models.AnalyzeRequest obj) {
if (obj == null) {
return null;
}
AnalyzeTextOptions analyzeTextOptions = null;
if (obj.getTokenizer() != null) {
LexicalTokenizerName tokenizer = LexicalTokenizerNameConverter.map(obj.getTokenizer());
analyzeTextOptions = new AnalyzeTextOptions(obj.getText(), tokenizer);
} else {
LexicalAnalyzerName analyzer = LexicalAnalyzerNameConverter.map(obj.getAnalyzer());
analyzeTextOptions = new AnalyzeTextOptions(obj.getText(), analyzer);
}
if (obj.getCharFilters() != null) {
CharFilterName[] charFilters = obj.getCharFilters().stream()
.map(CharFilterNameConverter::map)
.toArray(CharFilterName[]::new);
analyzeTextOptions.setCharFilters(charFilters);
}
if (obj.getTokenFilters() != null) {
TokenFilterName[] tokenFilters = obj.getTokenFilters().stream()
.map(TokenFilterNameConverter::map)
.toArray(TokenFilterName[]::new);
analyzeTextOptions.setTokenFilters(tokenFilters);
}
return analyzeTextOptions;
} | .toArray(TokenFilterName[]::new); | public static AnalyzeTextOptions map(com.azure.search.documents.indexes.implementation.models.AnalyzeRequest obj) {
if (obj == null) {
return null;
}
AnalyzeTextOptions analyzeTextOptions = null;
if (obj.getTokenizer() != null) {
LexicalTokenizerName tokenizer = LexicalTokenizerNameConverter.map(obj.getTokenizer());
analyzeTextOptions = new AnalyzeTextOptions(obj.getText(), tokenizer);
} else {
LexicalAnalyzerName analyzer = LexicalAnalyzerNameConverter.map(obj.getAnalyzer());
analyzeTextOptions = new AnalyzeTextOptions(obj.getText(), analyzer);
}
if (obj.getCharFilters() != null) {
analyzeTextOptions.setCharFilters(obj.getCharFilters().stream()
.map(CharFilterNameConverter::map)
.toArray(CharFilterName[]::new));
}
if (obj.getTokenFilters() != null) {
analyzeTextOptions.setTokenFilters(obj.getTokenFilters().stream()
.map(TokenFilterNameConverter::map)
.toArray(TokenFilterName[]::new));
}
return analyzeTextOptions;
} | class AnalyzeRequestConverter {
/**
* Maps from {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest} to {@link AnalyzeTextOptions}.
*/
/**
* Maps from {@link AnalyzeTextOptions} to {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest}.
*/
/**
 * Converts the public {@link AnalyzeTextOptions} model into the generated
 * implementation-side AnalyzeRequest, translating each name/enum through its
 * dedicated converter. Returns null when given null.
 */
public static com.azure.search.documents.indexes.implementation.models.AnalyzeRequest map(AnalyzeTextOptions obj) {
if (obj == null) {
return null;
}
com.azure.search.documents.indexes.implementation.models.AnalyzeRequest request =
new com.azure.search.documents.indexes.implementation.models.AnalyzeRequest(obj.getText());
// Each setter touches a distinct field, so the order of these guards is immaterial.
if (obj.getAnalyzerName() != null) {
request.setAnalyzer(LexicalAnalyzerNameConverter.map(obj.getAnalyzerName()));
}
if (obj.getTokenizerName() != null) {
request.setTokenizer(LexicalTokenizerNameConverter.map(obj.getTokenizerName()));
}
if (obj.getCharFilters() != null) {
request.setCharFilters(obj.getCharFilters().stream()
.map(CharFilterNameConverter::map)
.collect(Collectors.toList()));
}
if (obj.getTokenFilters() != null) {
request.setTokenFilters(obj.getTokenFilters().stream()
.map(TokenFilterNameConverter::map)
.collect(Collectors.toList()));
}
// Let the generated model check its required fields before handing it back.
request.validate();
return request;
}
// Static-utility converter; private constructor prevents instantiation.
private AnalyzeRequestConverter() {
}
} | class AnalyzeRequestConverter {
/**
* Maps from {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest} to {@link AnalyzeTextOptions}.
*/
/**
* Maps from {@link AnalyzeTextOptions} to {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest}.
*/
// Converts the public AnalyzeTextOptions model into the generated implementation-side
// AnalyzeRequest. Null input maps to null; each optional name/enum is translated
// through its dedicated converter only when present, and the generated model is
// validated before being returned.
public static com.azure.search.documents.indexes.implementation.models.AnalyzeRequest map(AnalyzeTextOptions obj) {
if (obj == null) {
return null;
}
com.azure.search.documents.indexes.implementation.models.AnalyzeRequest analyzeRequest =
new com.azure.search.documents.indexes.implementation.models.AnalyzeRequest(obj.getText());
if (obj.getCharFilters() != null) {
List<com.azure.search.documents.indexes.implementation.models.CharFilterName> charFilters =
obj.getCharFilters().stream().map(CharFilterNameConverter::map).collect(Collectors.toList());
analyzeRequest.setCharFilters(charFilters);
}
if (obj.getAnalyzerName() != null) {
com.azure.search.documents.indexes.implementation.models.LexicalAnalyzerName analyzer =
LexicalAnalyzerNameConverter.map(obj.getAnalyzerName());
analyzeRequest.setAnalyzer(analyzer);
}
if (obj.getTokenFilters() != null) {
List<com.azure.search.documents.indexes.implementation.models.TokenFilterName> tokenFilters =
obj.getTokenFilters().stream().map(TokenFilterNameConverter::map).collect(Collectors.toList());
analyzeRequest.setTokenFilters(tokenFilters);
}
if (obj.getTokenizerName() != null) {
com.azure.search.documents.indexes.implementation.models.LexicalTokenizerName tokenizer =
LexicalTokenizerNameConverter.map(obj.getTokenizerName());
analyzeRequest.setTokenizer(tokenizer);
}
// Let the generated model check its required fields before handing it back.
analyzeRequest.validate();
return analyzeRequest;
}
// Static-utility converter; private constructor prevents instantiation.
private AnalyzeRequestConverter() {
}
} |
NIT: final static "consts" as class level to avoid allocation for every execution | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | final String segmentSeparator = " | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
// First start: lazily resolve collection metadata, build the lease store manager and
// the partition manager, then run it. Later calls just restart the cached manager.
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
// Cache so stop()/isStarted() and subsequent start() calls observe the manager.
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
/**
 * Stops change-feed processing. Throws IllegalStateException when the processor
 * never completed a start() (no running partition manager to stop).
 */
@Override
public Mono<Void> stop() {
final PartitionManager manager = this.partitionManager;
if (manager != null && manager.isRunning()) {
return manager.stop();
}
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
// Started only once start() has built the partition manager and it is still running.
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
    // Unlike the other setters, null is not rejected here; build() fails later if it was never set.
    this.hostName = hostName;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code feedDocumentClient} is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient == null) {
        // NOTE(review): message says "feedContextClient" but the parameter is feedDocumentClient — confirm intended.
        throw new IllegalArgumentException("feedContextClient");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the change feed processor options to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code changeFeedProcessorOptions} is null.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (changeFeedProcessorOptions == null) {
        throw new IllegalArgumentException("changeFeedProcessorOptions");
    }
    this.changeFeedProcessorOptions = changeFeedProcessorOptions;
    return this;
}
/**
 * Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code observerFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (observerFactory == null) {
        throw new IllegalArgumentException("observerFactory");
    }
    this.observerFactory = observerFactory;
    return this;
}
/**
 * Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code type} is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (type == null) {
        throw new IllegalArgumentException("type");
    }
    // Replaces any factory previously supplied via observerFactory(...).
    this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
    return this;
}
/**
 * Sets a consumer that is invoked with each batch of change feed documents.
 *
 * @param consumer the callback receiving each batch of changes as JSON nodes.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    // Convenience wrapper: adapts the consumer into an observer factory.
    return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    this.databaseResourceId = databaseResourceId;
    return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseClient} is null or content response on write is disabled.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease management reads back written documents, so content-on-write must be enabled.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    // Weaker-than-SESSION consistency can cause stale lease reads; warn but allow.
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // NOTE(review): log message has grammar typos ("are less then") — runtime string left untouched here.
        logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy == null) {
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory == null) {
        throw new IllegalArgumentException("partitionProcessorFactory");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager == null) {
        throw new IllegalArgumentException("leaseStoreManager");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor == null) {
        throw new IllegalArgumentException("healthMonitor");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if the host name or observer was never configured.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Consistency fix: mirror the validation performed by the other build() variant in this file —
    // warn when the lease acquire interval is configured below the supported default.
    if (this.changeFeedProcessorOptions != null
        && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (this.scheduler == null) {
        // Default to the elastic scheduler when the caller supplied none.
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
// Creates a builder with the default partition-query batch size and degree of parallelism.
public ChangeFeedProcessorBuilderImpl() {
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25; // duplicates the field initializer's default
}
// Creates a builder around an already-constructed partition manager; start() will reuse it
// instead of bootstrapping a new one.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
/**
 * Resolves the database and container resource IDs for the monitored collection,
 * defaulting the processor options first if the caller supplied none.
 *
 * @return this builder, once both resource IDs have been cached.
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map(databaseResponse -> {
            // Cache the database resource ID for lease-prefix construction.
            this.databaseResourceId = databaseResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        .flatMap(resolvedDatabaseId -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(containerResponse -> {
                // Cache the container resource ID as well, then yield the builder itself.
                this.collectionResourceId = containerResponse.getProperties().getId();
                return this;
            }));
}
// Lazily builds (and caches) the lease store manager from the lease container when the caller
// did not supply one via withLeaseStoreManager(...).
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                // The lease container must be partitioned on exactly "/id" for per-lease point operations.
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                        collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                        collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                // Prefix scopes this processor's lease documents within the shared lease container.
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so subsequent calls (and getEstimatedLag/getCurrentState) reuse it.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the lease-document prefix that scopes this processor's leases:
 * optional user prefix + service host + database resource ID + collection resource ID.
 *
 * @return the computed lease prefix.
 */
private String getLeasePrefix() {
    String userPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (userPrefix == null) {
        userPrefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return String.format(
        "%s%s_%s_%s",
        userPrefix,
        serviceEndpoint.getHost(),
        this.databaseResourceId,
        this.collectionResourceId);
}
// Assembles the partition-management pipeline: synchronizer, bootstrapper, supervisor factory,
// partition controller (optionally health-monitored), load balancer, and finally the manager.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    // Wrap the user-supplied observer factory so observers checkpoint automatically.
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // Use the caller-supplied processor factory when present; otherwise the default implementation.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    if (this.loadBalancingStrategy == null) {
        // Default: spread partitions evenly across hosts.
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    // NOTE(review): the health-monitoring decorator (partitionController2) is handed only to the
    // load balancer, while PartitionManagerImpl receives the undecorated partitionController —
    // confirm this asymmetry is intentional and not an oversight.
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
@Override
public void close() {
    // Fire-and-forget shutdown: stop() is subscribed on the elastic scheduler so close() does not block.
    // NOTE(review): stop() throws IllegalStateException synchronously if the processor never fully
    // started, so close() on an unstarted processor will throw — confirm this is acceptable.
    this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    // Already bootstrapped: just (re)start the existing partition manager.
    if (this.partitionManager != null) {
        return this.partitionManager.start();
    }
    // First start: resolve collection metadata, build the lease store manager,
    // assemble the partition manager, cache it, then start it.
    return this.initializeCollectionPropertiesForBuild()
        .flatMap(ignored -> this.getLeaseStoreManager().flatMap(this::buildPartitionManager))
        .flatMap(builtManager -> {
            this.partitionManager = builtManager;
            return builtManager.start();
        });
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 * @throws IllegalStateException if the processor was never fully started.
 */
@Override
public Mono<Void> stop() {
    PartitionManager manager = this.partitionManager;
    boolean fullyStarted = manager != null && manager.isRunning();
    if (!fullyStarted) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return manager.stop();
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    PartitionManager manager = this.partitionManager;
    return manager != null && manager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
    List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        // Processor not started yet; nothing to report.
        return Mono.just(Collections.unmodifiableList(earlyResult));
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // Peek at most one pending change per partition, continuing from the lease's checkpoint.
            ChangeFeedOptions options = new ChangeFeedOptions()
                .setMaxItemCount(1)
                .setPartitionKeyRangeId(lease.getLeaseToken())
                .setStartFromBeginning(true)
                .setRequestContinuation(lease.getContinuationToken());
            return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
                .take(1)
                .map(feedResponse -> {
                    // The latest LSN for the partition comes from the session token, not the document.
                    // NOTE(review): SEGMENT_SEPARATOR's declaration is truncated in this source; the
                    // parsing here implies it should be "#" — confirm the constant's value.
                    String sessionTokenLsn = feedResponse.getSessionToken();
                    String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
                    String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
                    String latestLsn = segments[0];
                    if (segments.length >= 2) {
                        latestLsn = segments[1];
                    }
                    ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                        .setHostName(lease.getOwner())
                        .setLeaseToken(lease.getLeaseToken());
                    if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                        // Nothing pending: zero lag, and the continuation jumps to the latest LSN.
                        changeFeedProcessorState.setEstimatedLag(0)
                            .setContinuationToken(latestLsn);
                        return changeFeedProcessorState;
                    }
                    changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
                    Integer currentLsn = 0;
                    Integer estimatedLag = 0;
                    try {
                        currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
                        estimatedLag = Integer.valueOf(latestLsn);
                        // +1 because the fetched (unprocessed) document itself counts toward the lag.
                        estimatedLag = estimatedLag - currentLsn + 1;
                        changeFeedProcessorState.setEstimatedLag(estimatedLag);
                    } catch (NumberFormatException ex) {
                        logger.warn("Unexpected Cosmos LSN found", ex);
                        changeFeedProcessorState.setEstimatedLag(-1); // sentinel: lag could not be computed
                    }
                    return changeFeedProcessorState;
                });
        })
        .collectList()
        .map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if the host name or observer was never configured.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Warn (but allow) when the lease acquire interval is configured below the supported default.
    if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (this.scheduler == null) {
        // Default to the elastic scheduler when the caller supplied none.
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
Reverted | public static AnalyzeTextOptions map(com.azure.search.documents.indexes.implementation.models.AnalyzeRequest obj) {
if (obj == null) {
return null;
}
AnalyzeTextOptions analyzeTextOptions = null;
if (obj.getTokenizer() != null) {
LexicalTokenizerName tokenizer = LexicalTokenizerNameConverter.map(obj.getTokenizer());
analyzeTextOptions = new AnalyzeTextOptions(obj.getText(), tokenizer);
} else {
LexicalAnalyzerName analyzer = LexicalAnalyzerNameConverter.map(obj.getAnalyzer());
analyzeTextOptions = new AnalyzeTextOptions(obj.getText(), analyzer);
}
if (obj.getCharFilters() != null) {
CharFilterName[] charFilters = obj.getCharFilters().stream()
.map(CharFilterNameConverter::map)
.toArray(CharFilterName[]::new);
analyzeTextOptions.setCharFilters(charFilters);
}
if (obj.getTokenFilters() != null) {
TokenFilterName[] tokenFilters = obj.getTokenFilters().stream()
.map(TokenFilterNameConverter::map)
.toArray(TokenFilterName[]::new);
analyzeTextOptions.setTokenFilters(tokenFilters);
}
return analyzeTextOptions;
} | .toArray(TokenFilterName[]::new); | public static AnalyzeTextOptions map(com.azure.search.documents.indexes.implementation.models.AnalyzeRequest obj) {
if (obj == null) {
return null;
}
AnalyzeTextOptions analyzeTextOptions = null;
if (obj.getTokenizer() != null) {
LexicalTokenizerName tokenizer = LexicalTokenizerNameConverter.map(obj.getTokenizer());
analyzeTextOptions = new AnalyzeTextOptions(obj.getText(), tokenizer);
} else {
LexicalAnalyzerName analyzer = LexicalAnalyzerNameConverter.map(obj.getAnalyzer());
analyzeTextOptions = new AnalyzeTextOptions(obj.getText(), analyzer);
}
if (obj.getCharFilters() != null) {
analyzeTextOptions.setCharFilters(obj.getCharFilters().stream()
.map(CharFilterNameConverter::map)
.toArray(CharFilterName[]::new));
}
if (obj.getTokenFilters() != null) {
analyzeTextOptions.setTokenFilters(obj.getTokenFilters().stream()
.map(TokenFilterNameConverter::map)
.toArray(TokenFilterName[]::new));
}
return analyzeTextOptions;
} | class AnalyzeRequestConverter {
/**
* Maps from {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest} to {@link AnalyzeTextOptions}.
*/
/**
* Maps from {@link AnalyzeTextOptions} to {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest}.
*/
public static com.azure.search.documents.indexes.implementation.models.AnalyzeRequest map(AnalyzeTextOptions obj) {
if (obj == null) {
return null;
}
com.azure.search.documents.indexes.implementation.models.AnalyzeRequest analyzeRequest =
new com.azure.search.documents.indexes.implementation.models.AnalyzeRequest(obj.getText());
if (obj.getCharFilters() != null) {
List<com.azure.search.documents.indexes.implementation.models.CharFilterName> charFilters =
obj.getCharFilters().stream().map(CharFilterNameConverter::map).collect(Collectors.toList());
analyzeRequest.setCharFilters(charFilters);
}
if (obj.getAnalyzerName() != null) {
com.azure.search.documents.indexes.implementation.models.LexicalAnalyzerName analyzer =
LexicalAnalyzerNameConverter.map(obj.getAnalyzerName());
analyzeRequest.setAnalyzer(analyzer);
}
if (obj.getTokenFilters() != null) {
List<com.azure.search.documents.indexes.implementation.models.TokenFilterName> tokenFilters =
obj.getTokenFilters().stream().map(TokenFilterNameConverter::map).collect(Collectors.toList());
analyzeRequest.setTokenFilters(tokenFilters);
}
if (obj.getTokenizerName() != null) {
com.azure.search.documents.indexes.implementation.models.LexicalTokenizerName tokenizer =
LexicalTokenizerNameConverter.map(obj.getTokenizerName());
analyzeRequest.setTokenizer(tokenizer);
}
analyzeRequest.validate();
return analyzeRequest;
}
private AnalyzeRequestConverter() {
}
} | class AnalyzeRequestConverter {
/**
* Maps from {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest} to {@link AnalyzeTextOptions}.
*/
/**
* Maps from {@link AnalyzeTextOptions} to {@link com.azure.search.documents.indexes.implementation.models.AnalyzeRequest}.
*/
public static com.azure.search.documents.indexes.implementation.models.AnalyzeRequest map(AnalyzeTextOptions obj) {
if (obj == null) {
return null;
}
com.azure.search.documents.indexes.implementation.models.AnalyzeRequest analyzeRequest =
new com.azure.search.documents.indexes.implementation.models.AnalyzeRequest(obj.getText());
if (obj.getCharFilters() != null) {
List<com.azure.search.documents.indexes.implementation.models.CharFilterName> charFilters =
obj.getCharFilters().stream().map(CharFilterNameConverter::map).collect(Collectors.toList());
analyzeRequest.setCharFilters(charFilters);
}
if (obj.getAnalyzerName() != null) {
com.azure.search.documents.indexes.implementation.models.LexicalAnalyzerName analyzer =
LexicalAnalyzerNameConverter.map(obj.getAnalyzerName());
analyzeRequest.setAnalyzer(analyzer);
}
if (obj.getTokenFilters() != null) {
List<com.azure.search.documents.indexes.implementation.models.TokenFilterName> tokenFilters =
obj.getTokenFilters().stream().map(TokenFilterNameConverter::map).collect(Collectors.toList());
analyzeRequest.setTokenFilters(tokenFilters);
}
if (obj.getTokenizerName() != null) {
com.azure.search.documents.indexes.implementation.models.LexicalTokenizerName tokenizer =
LexicalTokenizerNameConverter.map(obj.getTokenizerName());
analyzeRequest.setTokenizer(tokenizer);
}
analyzeRequest.validate();
return analyzeRequest;
}
private AnalyzeRequestConverter() {
}
} |
Nice. | public void executeStoredProcedureWithScriptLoggingEnabled() throws Exception {
CosmosStoredProcedureProperties storedProcedure = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {" +
" var mytext = \"x\";" +
" var myval = 1;" +
" try {" +
" console.log(\"The value of %s is %s.\", mytext, myval);" +
" getContext().getResponse().setBody(\"Success!\");" +
" }" +
" catch(err) {" +
" getContext().getResponse().setBody(\"inline err: [\" + err.number + \"] \" + err);" +
" }" +
"}");
container.getScripts().createStoredProcedure(storedProcedure);
CosmosStoredProcedureRequestOptions options = new CosmosStoredProcedureRequestOptions();
options.setScriptLoggingEnabled(true);
options.setPartitionKey(PartitionKey.NONE);
CosmosStoredProcedureResponse executeResponse = container.getScripts()
.getStoredProcedure(storedProcedure.getId())
.execute(null, options);
String logResult = "The value of x is 1.";
assertThat(URLDecoder.decode(executeResponse.getScriptLog(), StandardCharsets.UTF_8)).isEqualTo(logResult);
} | assertThat(URLDecoder.decode(executeResponse.getScriptLog(), StandardCharsets.UTF_8)).isEqualTo(logResult); | public void executeStoredProcedureWithScriptLoggingEnabled() throws Exception {
CosmosStoredProcedureProperties storedProcedure = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {" +
" var mytext = \"x\";" +
" var myval = 1;" +
" try {" +
" console.log(\"The value of %s is %s.\", mytext, myval);" +
" getContext().getResponse().setBody(\"Success!\");" +
" }" +
" catch(err) {" +
" getContext().getResponse().setBody(\"inline err: [\" + err.number + \"] \" + err);" +
" }" +
"}");
container.getScripts().createStoredProcedure(storedProcedure);
CosmosStoredProcedureRequestOptions options = new CosmosStoredProcedureRequestOptions();
options.setScriptLoggingEnabled(true);
options.setPartitionKey(PartitionKey.NONE);
CosmosStoredProcedureResponse executeResponse = container.getScripts()
.getStoredProcedure(storedProcedure.getId())
.execute(null, options);
String logResult = "The value of x is 1.";
assertThat(executeResponse.getScriptLog()).isEqualTo(logResult);
} | class CosmosSyncStoredProcTest extends TestSuiteBase {
private String preExistingDatabaseId = CosmosDatabaseForTest.generateId();
private List<String> databases = new ArrayList<>();
private CosmosClient client;
private CosmosContainer container;
@Factory(dataProvider = "clientBuilders")
public CosmosSyncStoredProcTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void before_CosmosSyncStoredProcTest() {
assertThat(this.client).isNull();
this.client = getClientBuilder().buildClient();
CosmosAsyncContainer asyncContainer = getSharedMultiPartitionCosmosContainer(this.client.asyncClient());
container = client.getDatabase(asyncContainer.getDatabase().getId()).getContainer(asyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.client).isNotNull();
this.client.close();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void createStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
storedProcedureDef.setId(UUID.randomUUID().toString());
storedProcedureDef.setBody("function() {var x = 11;}");
CosmosStoredProcedureResponse response1 = container.getScripts()
.createStoredProcedure(storedProcedureDef,
new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, response1);
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void createSproc_alreadyExists() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
try {
container.getScripts().createStoredProcedure(storedProcedureDef);
} catch (Exception e) {
assertThat(e).isInstanceOf(CosmosException.class);
assertThat(((CosmosException) e).getStatusCode()).isEqualTo(HttpConstants.StatusCodes.CONFLICT);
}
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void readStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
CosmosStoredProcedure storedProcedure = container.getScripts().getStoredProcedure(storedProcedureDef.getId());
CosmosStoredProcedureResponse readResponse = storedProcedure.read();
validateResponse(storedProcedureDef, readResponse);
CosmosStoredProcedureResponse readResponse2 =
storedProcedure.read(new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, readResponse2);
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void replaceStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
CosmosStoredProcedureResponse readResponse = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.read();
validateResponse(storedProcedureDef, readResponse);
storedProcedureDef = readResponse.getProperties();
storedProcedureDef.setBody("function(){ var y = 20;}");
CosmosStoredProcedureResponse replaceResponse = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.replace(storedProcedureDef);
validateResponse(storedProcedureDef, replaceResponse);
storedProcedureDef.setBody("function(){ var z = 2;}");
CosmosStoredProcedureResponse replaceResponse2 = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.replace(storedProcedureDef,
new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, replaceResponse2);
}
private CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {var x = 10;}"
);
return storedProcedureDef;
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void deleteStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.delete();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void executeStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedure = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {" +
" var mytext = \"x\";" +
" var myval = 1;" +
" try {" +
" console.log(\"The value of %s is %s.\", mytext, myval);" +
" getContext().getResponse().setBody(\"Success!\");" +
" }" +
" catch(err) {" +
" getContext().getResponse().setBody(\"inline err: [\" + err.number + \"] \" + err);" +
" }" +
"}");
container.getScripts().createStoredProcedure(storedProcedure);
CosmosStoredProcedureRequestOptions options = new CosmosStoredProcedureRequestOptions();
options.setPartitionKey(PartitionKey.NONE);
CosmosStoredProcedureResponse executeResponse = container.getScripts()
.getStoredProcedure(storedProcedure.getId())
.execute(null, options);
assertThat(executeResponse.getActivityId()).isNotEmpty();
assertThat(executeResponse.getScriptLog()).isNull();
}
@Test(groups = "simple", timeOut = TIMEOUT)
@Test(groups = {"simple"}, timeOut = TIMEOUT)
private void readAllSprocs() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
container.getScripts().createStoredProcedure(storedProcedureDef);
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator3 =
container.getScripts().readAllStoredProcedures(cosmosQueryRequestOptions);
assertThat(feedResponseIterator3.iterator().hasNext()).isTrue();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
private void querySprocs() throws Exception {
CosmosStoredProcedureProperties properties = getCosmosStoredProcedureProperties();
container.getScripts().createStoredProcedure(properties);
String query = String.format("SELECT * from c where c.id = '%s'", properties.getId());
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator1 =
container.getScripts().queryStoredProcedures(query, cosmosQueryRequestOptions);
assertThat(feedResponseIterator1.iterator().hasNext()).isTrue();
SqlQuerySpec querySpec = new SqlQuerySpec(query);
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator2 =
container.getScripts().queryStoredProcedures(query, cosmosQueryRequestOptions);
assertThat(feedResponseIterator2.iterator().hasNext()).isTrue();
}
private void validateResponse(CosmosStoredProcedureProperties properties,
CosmosStoredProcedureResponse createResponse) {
assertThat(createResponse.getProperties().getId()).isNotNull();
assertThat(createResponse.getProperties().getId())
.as("check Resource Id")
.isEqualTo(properties.getId());
}
} | class CosmosSyncStoredProcTest extends TestSuiteBase {
private String preExistingDatabaseId = CosmosDatabaseForTest.generateId();
private List<String> databases = new ArrayList<>();
private CosmosClient client;
private CosmosContainer container;
@Factory(dataProvider = "clientBuilders")
public CosmosSyncStoredProcTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void before_CosmosSyncStoredProcTest() {
assertThat(this.client).isNull();
this.client = getClientBuilder().buildClient();
CosmosAsyncContainer asyncContainer = getSharedMultiPartitionCosmosContainer(this.client.asyncClient());
container = client.getDatabase(asyncContainer.getDatabase().getId()).getContainer(asyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.client).isNotNull();
this.client.close();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void createStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
storedProcedureDef.setId(UUID.randomUUID().toString());
storedProcedureDef.setBody("function() {var x = 11;}");
CosmosStoredProcedureResponse response1 = container.getScripts()
.createStoredProcedure(storedProcedureDef,
new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, response1);
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void createSproc_alreadyExists() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
try {
container.getScripts().createStoredProcedure(storedProcedureDef);
} catch (Exception e) {
assertThat(e).isInstanceOf(CosmosException.class);
assertThat(((CosmosException) e).getStatusCode()).isEqualTo(HttpConstants.StatusCodes.CONFLICT);
}
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void readStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
CosmosStoredProcedure storedProcedure = container.getScripts().getStoredProcedure(storedProcedureDef.getId());
CosmosStoredProcedureResponse readResponse = storedProcedure.read();
validateResponse(storedProcedureDef, readResponse);
CosmosStoredProcedureResponse readResponse2 =
storedProcedure.read(new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, readResponse2);
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void replaceStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
CosmosStoredProcedureResponse readResponse = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.read();
validateResponse(storedProcedureDef, readResponse);
storedProcedureDef = readResponse.getProperties();
storedProcedureDef.setBody("function(){ var y = 20;}");
CosmosStoredProcedureResponse replaceResponse = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.replace(storedProcedureDef);
validateResponse(storedProcedureDef, replaceResponse);
storedProcedureDef.setBody("function(){ var z = 2;}");
CosmosStoredProcedureResponse replaceResponse2 = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.replace(storedProcedureDef,
new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, replaceResponse2);
}
private CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {var x = 10;}"
);
return storedProcedureDef;
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void deleteStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.delete();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void executeStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedure = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {" +
" var mytext = \"x\";" +
" var myval = 1;" +
" try {" +
" console.log(\"The value of %s is %s.\", mytext, myval);" +
" getContext().getResponse().setBody(\"Success!\");" +
" }" +
" catch(err) {" +
" getContext().getResponse().setBody(\"inline err: [\" + err.number + \"] \" + err);" +
" }" +
"}");
container.getScripts().createStoredProcedure(storedProcedure);
CosmosStoredProcedureRequestOptions options = new CosmosStoredProcedureRequestOptions();
options.setPartitionKey(PartitionKey.NONE);
CosmosStoredProcedureResponse executeResponse = container.getScripts()
.getStoredProcedure(storedProcedure.getId())
.execute(null, options);
assertThat(executeResponse.getActivityId()).isNotEmpty();
assertThat(executeResponse.getScriptLog()).isNull();
}
@Test(groups = "simple", timeOut = TIMEOUT)
@Test(groups = {"simple"}, timeOut = TIMEOUT)
private void readAllSprocs() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
container.getScripts().createStoredProcedure(storedProcedureDef);
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator3 =
container.getScripts().readAllStoredProcedures(cosmosQueryRequestOptions);
assertThat(feedResponseIterator3.iterator().hasNext()).isTrue();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
private void querySprocs() throws Exception {
CosmosStoredProcedureProperties properties = getCosmosStoredProcedureProperties();
container.getScripts().createStoredProcedure(properties);
String query = String.format("SELECT * from c where c.id = '%s'", properties.getId());
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator1 =
container.getScripts().queryStoredProcedures(query, cosmosQueryRequestOptions);
assertThat(feedResponseIterator1.iterator().hasNext()).isTrue();
SqlQuerySpec querySpec = new SqlQuerySpec(query);
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator2 =
container.getScripts().queryStoredProcedures(query, cosmosQueryRequestOptions);
assertThat(feedResponseIterator2.iterator().hasNext()).isTrue();
}
private void validateResponse(CosmosStoredProcedureProperties properties,
CosmosStoredProcedureResponse createResponse) {
assertThat(createResponse.getProperties().getId()).isNotNull();
assertThat(createResponse.getProperties().getId())
.as("check Resource Id")
.isEqualTo(properties.getId());
}
} |
I wonder whose job it is to do the decoding? is it the SDK job to decode scriptLog or the user? | public void executeStoredProcedureWithScriptLoggingEnabled() throws Exception {
CosmosStoredProcedureProperties storedProcedure = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {" +
" var mytext = \"x\";" +
" var myval = 1;" +
" try {" +
" console.log(\"The value of %s is %s.\", mytext, myval);" +
" getContext().getResponse().setBody(\"Success!\");" +
" }" +
" catch(err) {" +
" getContext().getResponse().setBody(\"inline err: [\" + err.number + \"] \" + err);" +
" }" +
"}");
container.getScripts().createStoredProcedure(storedProcedure);
CosmosStoredProcedureRequestOptions options = new CosmosStoredProcedureRequestOptions();
options.setScriptLoggingEnabled(true);
options.setPartitionKey(PartitionKey.NONE);
CosmosStoredProcedureResponse executeResponse = container.getScripts()
.getStoredProcedure(storedProcedure.getId())
.execute(null, options);
String logResult = "The value of x is 1.";
assertThat(URLDecoder.decode(executeResponse.getScriptLog(), StandardCharsets.UTF_8)).isEqualTo(logResult);
} | assertThat(URLDecoder.decode(executeResponse.getScriptLog(), StandardCharsets.UTF_8)).isEqualTo(logResult); | public void executeStoredProcedureWithScriptLoggingEnabled() throws Exception {
CosmosStoredProcedureProperties storedProcedure = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {" +
" var mytext = \"x\";" +
" var myval = 1;" +
" try {" +
" console.log(\"The value of %s is %s.\", mytext, myval);" +
" getContext().getResponse().setBody(\"Success!\");" +
" }" +
" catch(err) {" +
" getContext().getResponse().setBody(\"inline err: [\" + err.number + \"] \" + err);" +
" }" +
"}");
container.getScripts().createStoredProcedure(storedProcedure);
CosmosStoredProcedureRequestOptions options = new CosmosStoredProcedureRequestOptions();
options.setScriptLoggingEnabled(true);
options.setPartitionKey(PartitionKey.NONE);
CosmosStoredProcedureResponse executeResponse = container.getScripts()
.getStoredProcedure(storedProcedure.getId())
.execute(null, options);
String logResult = "The value of x is 1.";
assertThat(executeResponse.getScriptLog()).isEqualTo(logResult);
} | class CosmosSyncStoredProcTest extends TestSuiteBase {
private String preExistingDatabaseId = CosmosDatabaseForTest.generateId();
private List<String> databases = new ArrayList<>();
private CosmosClient client;
private CosmosContainer container;
@Factory(dataProvider = "clientBuilders")
public CosmosSyncStoredProcTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void before_CosmosSyncStoredProcTest() {
assertThat(this.client).isNull();
this.client = getClientBuilder().buildClient();
CosmosAsyncContainer asyncContainer = getSharedMultiPartitionCosmosContainer(this.client.asyncClient());
container = client.getDatabase(asyncContainer.getDatabase().getId()).getContainer(asyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.client).isNotNull();
this.client.close();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void createStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
storedProcedureDef.setId(UUID.randomUUID().toString());
storedProcedureDef.setBody("function() {var x = 11;}");
CosmosStoredProcedureResponse response1 = container.getScripts()
.createStoredProcedure(storedProcedureDef,
new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, response1);
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void createSproc_alreadyExists() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
try {
container.getScripts().createStoredProcedure(storedProcedureDef);
} catch (Exception e) {
assertThat(e).isInstanceOf(CosmosException.class);
assertThat(((CosmosException) e).getStatusCode()).isEqualTo(HttpConstants.StatusCodes.CONFLICT);
}
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void readStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
CosmosStoredProcedure storedProcedure = container.getScripts().getStoredProcedure(storedProcedureDef.getId());
CosmosStoredProcedureResponse readResponse = storedProcedure.read();
validateResponse(storedProcedureDef, readResponse);
CosmosStoredProcedureResponse readResponse2 =
storedProcedure.read(new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, readResponse2);
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void replaceStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
CosmosStoredProcedureResponse readResponse = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.read();
validateResponse(storedProcedureDef, readResponse);
storedProcedureDef = readResponse.getProperties();
storedProcedureDef.setBody("function(){ var y = 20;}");
CosmosStoredProcedureResponse replaceResponse = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.replace(storedProcedureDef);
validateResponse(storedProcedureDef, replaceResponse);
storedProcedureDef.setBody("function(){ var z = 2;}");
CosmosStoredProcedureResponse replaceResponse2 = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.replace(storedProcedureDef,
new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, replaceResponse2);
}
private CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {var x = 10;}"
);
return storedProcedureDef;
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void deleteStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.delete();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void executeStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedure = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {" +
" var mytext = \"x\";" +
" var myval = 1;" +
" try {" +
" console.log(\"The value of %s is %s.\", mytext, myval);" +
" getContext().getResponse().setBody(\"Success!\");" +
" }" +
" catch(err) {" +
" getContext().getResponse().setBody(\"inline err: [\" + err.number + \"] \" + err);" +
" }" +
"}");
container.getScripts().createStoredProcedure(storedProcedure);
CosmosStoredProcedureRequestOptions options = new CosmosStoredProcedureRequestOptions();
options.setPartitionKey(PartitionKey.NONE);
CosmosStoredProcedureResponse executeResponse = container.getScripts()
.getStoredProcedure(storedProcedure.getId())
.execute(null, options);
assertThat(executeResponse.getActivityId()).isNotEmpty();
assertThat(executeResponse.getScriptLog()).isNull();
}
@Test(groups = "simple", timeOut = TIMEOUT)
@Test(groups = {"simple"}, timeOut = TIMEOUT)
private void readAllSprocs() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
container.getScripts().createStoredProcedure(storedProcedureDef);
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator3 =
container.getScripts().readAllStoredProcedures(cosmosQueryRequestOptions);
assertThat(feedResponseIterator3.iterator().hasNext()).isTrue();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
private void querySprocs() throws Exception {
CosmosStoredProcedureProperties properties = getCosmosStoredProcedureProperties();
container.getScripts().createStoredProcedure(properties);
String query = String.format("SELECT * from c where c.id = '%s'", properties.getId());
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator1 =
container.getScripts().queryStoredProcedures(query, cosmosQueryRequestOptions);
assertThat(feedResponseIterator1.iterator().hasNext()).isTrue();
SqlQuerySpec querySpec = new SqlQuerySpec(query);
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator2 =
container.getScripts().queryStoredProcedures(query, cosmosQueryRequestOptions);
assertThat(feedResponseIterator2.iterator().hasNext()).isTrue();
}
    /**
     * Asserts that a stored-procedure response echoes the id of the request properties.
     *
     * @param properties     the properties that were sent to the service.
     * @param createResponse the service response to validate against {@code properties}.
     */
    private void validateResponse(CosmosStoredProcedureProperties properties,
                                  CosmosStoredProcedureResponse createResponse) {
        assertThat(createResponse.getProperties().getId()).isNotNull();
        assertThat(createResponse.getProperties().getId())
            .as("check Resource Id")
            .isEqualTo(properties.getId());
    }
} | class CosmosSyncStoredProcTest extends TestSuiteBase {
private String preExistingDatabaseId = CosmosDatabaseForTest.generateId();
private List<String> databases = new ArrayList<>();
private CosmosClient client;
private CosmosContainer container;
@Factory(dataProvider = "clientBuilders")
public CosmosSyncStoredProcTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void before_CosmosSyncStoredProcTest() {
assertThat(this.client).isNull();
this.client = getClientBuilder().buildClient();
CosmosAsyncContainer asyncContainer = getSharedMultiPartitionCosmosContainer(this.client.asyncClient());
container = client.getDatabase(asyncContainer.getDatabase().getId()).getContainer(asyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.client).isNotNull();
this.client.close();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void createStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
storedProcedureDef.setId(UUID.randomUUID().toString());
storedProcedureDef.setBody("function() {var x = 11;}");
CosmosStoredProcedureResponse response1 = container.getScripts()
.createStoredProcedure(storedProcedureDef,
new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, response1);
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void createSproc_alreadyExists() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
try {
container.getScripts().createStoredProcedure(storedProcedureDef);
} catch (Exception e) {
assertThat(e).isInstanceOf(CosmosException.class);
assertThat(((CosmosException) e).getStatusCode()).isEqualTo(HttpConstants.StatusCodes.CONFLICT);
}
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void readStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
CosmosStoredProcedure storedProcedure = container.getScripts().getStoredProcedure(storedProcedureDef.getId());
CosmosStoredProcedureResponse readResponse = storedProcedure.read();
validateResponse(storedProcedureDef, readResponse);
CosmosStoredProcedureResponse readResponse2 =
storedProcedure.read(new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, readResponse2);
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void replaceStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
CosmosStoredProcedureResponse readResponse = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.read();
validateResponse(storedProcedureDef, readResponse);
storedProcedureDef = readResponse.getProperties();
storedProcedureDef.setBody("function(){ var y = 20;}");
CosmosStoredProcedureResponse replaceResponse = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.replace(storedProcedureDef);
validateResponse(storedProcedureDef, replaceResponse);
storedProcedureDef.setBody("function(){ var z = 2;}");
CosmosStoredProcedureResponse replaceResponse2 = container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.replace(storedProcedureDef,
new CosmosStoredProcedureRequestOptions());
validateResponse(storedProcedureDef, replaceResponse2);
}
private CosmosStoredProcedureProperties getCosmosStoredProcedureProperties() {
CosmosStoredProcedureProperties storedProcedureDef = new CosmosStoredProcedureProperties(
UUID.randomUUID().toString(),
"function() {var x = 10;}"
);
return storedProcedureDef;
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
public void deleteStoredProcedure() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
CosmosStoredProcedureResponse response = container.getScripts().createStoredProcedure(storedProcedureDef);
validateResponse(storedProcedureDef, response);
container.getScripts()
.getStoredProcedure(storedProcedureDef.getId())
.delete();
}
    /**
     * Executes a stored procedure (with {@link PartitionKey#NONE}) and checks that the
     * response carries an activity id and no script log (the sproc's console.log output
     * is not requested, so getScriptLog() must be null).
     */
    @Test(groups = {"simple"}, timeOut = TIMEOUT)
    public void executeStoredProcedure() throws Exception {
        // Sproc body logs a message and sets a success body; errors are reported inline.
        CosmosStoredProcedureProperties storedProcedure = new CosmosStoredProcedureProperties(
            UUID.randomUUID().toString(),
            "function() {" +
                "        var mytext = \"x\";" +
                "        var myval = 1;" +
                "        try {" +
                "            console.log(\"The value of %s is %s.\", mytext, myval);" +
                "            getContext().getResponse().setBody(\"Success!\");" +
                "        }" +
                "        catch(err) {" +
                "            getContext().getResponse().setBody(\"inline err: [\" + err.number + \"] \" + err);" +
                "        }" +
                "}");
        container.getScripts().createStoredProcedure(storedProcedure);
        CosmosStoredProcedureRequestOptions options = new CosmosStoredProcedureRequestOptions();
        options.setPartitionKey(PartitionKey.NONE);
        CosmosStoredProcedureResponse executeResponse = container.getScripts()
                                                            .getStoredProcedure(storedProcedure.getId())
                                                            .execute(null, options);
        assertThat(executeResponse.getActivityId()).isNotEmpty();
        assertThat(executeResponse.getScriptLog()).isNull();
    }
@Test(groups = "simple", timeOut = TIMEOUT)
@Test(groups = {"simple"}, timeOut = TIMEOUT)
private void readAllSprocs() throws Exception {
CosmosStoredProcedureProperties storedProcedureDef = getCosmosStoredProcedureProperties();
container.getScripts().createStoredProcedure(storedProcedureDef);
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator3 =
container.getScripts().readAllStoredProcedures(cosmosQueryRequestOptions);
assertThat(feedResponseIterator3.iterator().hasNext()).isTrue();
}
@Test(groups = {"simple"}, timeOut = TIMEOUT)
private void querySprocs() throws Exception {
CosmosStoredProcedureProperties properties = getCosmosStoredProcedureProperties();
container.getScripts().createStoredProcedure(properties);
String query = String.format("SELECT * from c where c.id = '%s'", properties.getId());
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator1 =
container.getScripts().queryStoredProcedures(query, cosmosQueryRequestOptions);
assertThat(feedResponseIterator1.iterator().hasNext()).isTrue();
SqlQuerySpec querySpec = new SqlQuerySpec(query);
CosmosPagedIterable<CosmosStoredProcedureProperties> feedResponseIterator2 =
container.getScripts().queryStoredProcedures(query, cosmosQueryRequestOptions);
assertThat(feedResponseIterator2.iterator().hasNext()).isTrue();
}
    /**
     * Asserts that a stored-procedure response echoes the id of the request properties.
     *
     * @param properties     the properties that were sent to the service.
     * @param createResponse the service response to validate against {@code properties}.
     */
    private void validateResponse(CosmosStoredProcedureProperties properties,
                                  CosmosStoredProcedureResponse createResponse) {
        assertThat(createResponse.getProperties().getId()).isNotNull();
        assertThat(createResponse.getProperties().getId())
            .as("check Resource Id")
            .isEqualTo(properties.getId());
    }
} |
Can we use StringUtils.isEmpty here ? | public static String decodeAsUTF8String(String inputString) {
        // NOTE(review): equivalent to StringUtils.isEmpty(inputString); kept inline so this
        // method does not depend on commons-lang.
        if (inputString == null || inputString.isEmpty()) {
            return inputString;
        }
        try {
            return URLDecoder.decode(inputString, StandardCharsets.UTF_8.toString());
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is mandated by the JVM spec, so this path is effectively unreachable;
            // log and fall back to the raw input rather than failing the caller.
            logger.warn("Error while decoding input string", e);
            return inputString;
        }
    } | public static String decodeAsUTF8String(String inputString) {
if (inputString == null || inputString.isEmpty()) {
return inputString;
}
try {
return URLDecoder.decode(inputString, StandardCharsets.UTF_8.toString());
} catch (UnsupportedEncodingException e) {
logger.warn("Error while decoding input string", e);
return inputString;
}
} | class Utils {
private final static Logger logger = LoggerFactory.getLogger(Utils.class);
private static final int ONE_KB = 1024;
private static final ZoneId GMT_ZONE_ID = ZoneId.of("GMT");
public static final Base64.Encoder Base64Encoder = Base64.getEncoder();
public static final Base64.Decoder Base64Decoder = Base64.getDecoder();
private static final ObjectMapper simpleObjectMapper = new ObjectMapper();
private static final TimeBasedGenerator TIME_BASED_GENERATOR =
Generators.timeBasedGenerator(EthernetAddress.constructMulticastAddress());
private static final DateTimeFormatter RFC_1123_DATE_TIME = DateTimeFormatter.ofPattern("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
static {
Utils.simpleObjectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
Utils.simpleObjectMapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
Utils.simpleObjectMapper.configure(JsonParser.Feature.ALLOW_TRAILING_COMMA, true);
Utils.simpleObjectMapper.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
Utils.simpleObjectMapper.configure(DeserializationFeature.ACCEPT_FLOAT_AS_INT, false);
Utils.simpleObjectMapper.registerModule(new AfterburnerModule());
}
public static byte[] getUTF8BytesOrNull(String str) {
if (str == null) {
return null;
}
return str.getBytes(StandardCharsets.UTF_8);
}
public static byte[] getUTF8Bytes(String str) {
return str.getBytes(StandardCharsets.UTF_8);
}
public static String encodeBase64String(byte[] binaryData) {
String encodedString = Base64Encoder.encodeToString(binaryData);
if (encodedString.endsWith("\r\n")) {
encodedString = encodedString.substring(0, encodedString.length() - 2);
}
return encodedString;
}
/**
* Checks whether the specified link is Name based or not
*
* @param link the link to analyze.
* @return true or false
*/
public static boolean isNameBased(String link) {
if (StringUtils.isEmpty(link)) {
return false;
}
if (link.startsWith("/") && link.length() > 1) {
link = link.substring(1);
}
String[] parts = StringUtils.split(link, "/");
if (parts.length == 0 || StringUtils.isEmpty(parts[0])
|| !parts[0].equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) {
return false;
}
if (parts.length < 2 || StringUtils.isEmpty(parts[1])) {
return false;
}
String databaseID = parts[1];
if (databaseID.length() != 8) {
return true;
}
byte[] buffer = ResourceId.fromBase64String(databaseID);
if (buffer.length != 4) {
return true;
}
return false;
}
/**
* Checks whether the specified link is a Database Self Link or a Database
* ID based link
*
* @param link the link to analyze.
* @return true or false
*/
public static boolean isDatabaseLink(String link) {
if (StringUtils.isEmpty(link)) {
return false;
}
link = trimBeginningAndEndingSlashes(link);
String[] parts = StringUtils.split(link, "/");
if (parts.length != 2) {
return false;
}
if (StringUtils.isEmpty(parts[0]) || !parts[0].equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) {
return false;
}
if (StringUtils.isEmpty(parts[1])) {
return false;
}
return true;
}
/**
* Joins the specified paths by appropriately padding them with '/'
*
* @param path1 the first path segment to join.
* @param path2 the second path segment to join.
* @return the concatenated path with '/'
*/
public static String joinPath(String path1, String path2) {
path1 = trimBeginningAndEndingSlashes(path1);
String result = "/" + path1 + "/";
if (!StringUtils.isEmpty(path2)) {
path2 = trimBeginningAndEndingSlashes(path2);
result += path2 + "/";
}
return result;
}
/**
* Trims the beginning and ending '/' from the given path
*
* @param path the path to trim for beginning and ending slashes
* @return the path without beginning and ending '/'
*/
public static String trimBeginningAndEndingSlashes(String path) {
if(path == null) {
return null;
}
if (path.startsWith("/")) {
path = path.substring(1);
}
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
return path;
}
public static Map<String, String> paramEncode(Map<String, String> queryParams) {
HashMap<String, String> map = new HashMap<>();
for(Map.Entry<String, String> paramEntry: queryParams.entrySet()) {
try {
map.put(paramEntry.getKey(), URLEncoder.encode(paramEntry.getValue(), "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException(e);
}
}
return map;
}
public static String createQuery(Map<String, String> queryParameters) {
if (queryParameters == null)
return "";
StringBuilder queryString = new StringBuilder();
for (Map.Entry<String, String> nameValuePair : queryParameters.entrySet()) {
String key = nameValuePair.getKey();
String value = nameValuePair.getValue();
if (key != null && !key.isEmpty()) {
if (queryString.length() > 0) {
queryString.append(RuntimeConstants.Separators.Query[1]);
}
queryString.append(key);
if (value != null) {
queryString.append(RuntimeConstants.Separators.Query[2]);
queryString.append(value);
}
}
}
return queryString.toString();
}
public static URI setQuery(String urlString, String query) {
if (urlString == null)
throw new IllegalStateException("urlString parameter can't be null.");
query = Utils.removeLeadingQuestionMark(query);
try {
if (query != null && !query.isEmpty()) {
return new URI(Utils.addTrailingSlash(urlString) + RuntimeConstants.Separators.Query[0] + query);
} else {
return new URI(Utils.addTrailingSlash(urlString));
}
} catch (URISyntaxException e) {
throw new IllegalStateException("Uri is invalid: ", e);
}
}
/**
* Given the full path to a resource, extract the collection path.
*
* @param resourceFullName the full path to the resource.
* @return the path of the collection in which the resource is.
*/
public static String getCollectionName(String resourceFullName) {
if (resourceFullName != null) {
resourceFullName = Utils.trimBeginningAndEndingSlashes(resourceFullName);
int slashCount = 0;
for (int i = 0; i < resourceFullName.length(); i++) {
if (resourceFullName.charAt(i) == '/') {
slashCount++;
if (slashCount == 4) {
return resourceFullName.substring(0, i);
}
}
}
}
return resourceFullName;
}
public static <T> int getCollectionSize(Collection<T> collection) {
if (collection == null) {
return 0;
}
return collection.size();
}
public static Boolean isCollectionPartitioned(DocumentCollection collection) {
if (collection == null) {
throw new IllegalArgumentException("collection");
}
return collection.getPartitionKey() != null
&& collection.getPartitionKey().getPaths() != null
&& collection.getPartitionKey().getPaths().size() > 0;
}
public static boolean isCollectionChild(ResourceType type) {
return type == ResourceType.Document || type == ResourceType.Attachment || type == ResourceType.Conflict
|| type == ResourceType.StoredProcedure || type == ResourceType.Trigger || type == ResourceType.UserDefinedFunction;
}
public static boolean isWriteOperation(OperationType operationType) {
return operationType == OperationType.Create || operationType == OperationType.Upsert || operationType == OperationType.Delete || operationType == OperationType.Replace
|| operationType == OperationType.ExecuteJavaScript;
}
public static boolean isFeedRequest(OperationType requestOperationType) {
return requestOperationType == OperationType.Create ||
requestOperationType == OperationType.Upsert ||
requestOperationType == OperationType.ReadFeed ||
requestOperationType == OperationType.Query ||
requestOperationType == OperationType.SqlQuery ||
requestOperationType == OperationType.HeadFeed;
}
private static String addTrailingSlash(String path) {
if (path == null || path.isEmpty())
path = new String(RuntimeConstants.Separators.Url);
else if (path.charAt(path.length() - 1) != RuntimeConstants.Separators.Url[0])
path = path + RuntimeConstants.Separators.Url[0];
return path;
}
private static String removeLeadingQuestionMark(String path) {
if (path == null || path.isEmpty())
return path;
if (path.charAt(0) == RuntimeConstants.Separators.Query[0])
return path.substring(1);
return path;
}
public static boolean isValidConsistency(ConsistencyLevel backendConsistency,
ConsistencyLevel desiredConsistency) {
switch (backendConsistency) {
case STRONG:
return desiredConsistency == ConsistencyLevel.STRONG ||
desiredConsistency == ConsistencyLevel.BOUNDED_STALENESS ||
desiredConsistency == ConsistencyLevel.SESSION ||
desiredConsistency == ConsistencyLevel.EVENTUAL ||
desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
case BOUNDED_STALENESS:
return desiredConsistency == ConsistencyLevel.BOUNDED_STALENESS ||
desiredConsistency == ConsistencyLevel.SESSION ||
desiredConsistency == ConsistencyLevel.EVENTUAL ||
desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
case SESSION:
case EVENTUAL:
case CONSISTENT_PREFIX:
return desiredConsistency == ConsistencyLevel.SESSION ||
desiredConsistency == ConsistencyLevel.EVENTUAL ||
desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
default:
throw new IllegalArgumentException("backendConsistency");
}
}
public static String getUserAgent() {
return getUserAgent(HttpConstants.Versions.SDK_NAME, HttpConstants.Versions.SDK_VERSION);
}
public static String getUserAgent(String sdkName, String sdkVersion) {
String osName = System.getProperty("os.name");
if (osName == null) {
osName = "Unknown";
}
osName = osName.replaceAll("\\s", "");
String userAgent = String.format("%s/%s JRE/%s %s/%s",
osName,
System.getProperty("os.version"),
System.getProperty("java.version"),
sdkName,
sdkVersion);
return userAgent;
}
public static ObjectMapper getSimpleObjectMapper() {
return Utils.simpleObjectMapper;
}
/**
* Returns Current Time in RFC 1123 format, e.g,
* Fri, 01 Dec 2017 19:22:30 GMT.
*
* @return an instance of STRING
*/
public static String nowAsRFC1123() {
ZonedDateTime now = ZonedDateTime.now(GMT_ZONE_ID);
return Utils.RFC_1123_DATE_TIME.format(now);
}
public static UUID randomUUID() {
return TIME_BASED_GENERATOR.generate();
}
public static String zonedDateTimeAsUTCRFC1123(OffsetDateTime offsetDateTime){
return Utils.RFC_1123_DATE_TIME.format(offsetDateTime.atZoneSameInstant(GMT_ZONE_ID));
}
public static int getValueOrDefault(Integer val, int defaultValue) {
return val != null ? val.intValue() : defaultValue;
}
public static void checkStateOrThrow(boolean value, String argumentName, String message) throws IllegalArgumentException {
IllegalArgumentException t = checkStateOrReturnException(value, argumentName, message);
if (t != null) {
throw t;
}
}
public static void checkNotNullOrThrow(Object val, String argumentName, String message) throws NullPointerException {
NullPointerException t = checkNotNullOrReturnException(val, argumentName, message);
if (t != null) {
throw t;
}
}
public static void checkStateOrThrow(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) throws IllegalArgumentException {
IllegalArgumentException t = checkStateOrReturnException(value, argumentName, argumentName, messageTemplateParams);
if (t != null) {
throw t;
}
}
public static IllegalArgumentException checkStateOrReturnException(boolean value, String argumentName, String message) {
if (value) {
return null;
}
return new IllegalArgumentException(String.format("argumentName: %s, message: %s", argumentName, message));
}
public static IllegalArgumentException checkStateOrReturnException(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) {
if (value) {
return null;
}
return new IllegalArgumentException(String.format("argumentName: %s, message: %s", argumentName, String.format(messageTemplate, messageTemplateParams)));
}
private static NullPointerException checkNotNullOrReturnException(Object val, String argumentName, String messageTemplate, Object... messageTemplateParams) {
if (val != null) {
return null;
}
return new NullPointerException(String.format("argumentName: %s, message: %s", argumentName, String.format(messageTemplate, messageTemplateParams)));
}
public static BadRequestException checkRequestOrReturnException(boolean value, String argumentName, String message) {
if (value) {
return null;
}
return new BadRequestException(String.format("argumentName: %s, message: %s", argumentName, message));
}
public static BadRequestException checkRequestOrReturnException(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) {
if (value) {
return null;
}
return new BadRequestException(String.format("argumentName: %s, message: %s", argumentName, String.format(messageTemplate, messageTemplateParams)));
}
@SuppressWarnings("unchecked")
public static <O, I> O as(I i, Class<O> klass) {
if (i == null) {
return null;
}
if (klass.isInstance(i)) {
return (O) i;
} else {
return null;
}
}
@SuppressWarnings("unchecked")
public static <V> List<V> immutableListOf() {
return Collections.EMPTY_LIST;
}
public static <V> List<V> immutableListOf(V v1) {
List<V> list = new ArrayList<>();
list.add(v1);
return Collections.unmodifiableList(list);
}
public static <K, V> Map<K, V>immutableMapOf() {
return Collections.emptyMap();
}
public static <K, V> Map<K, V>immutableMapOf(K k1, V v1) {
Map<K, V> map = new HashMap<K ,V>();
map.put(k1, v1);
map = Collections.unmodifiableMap(map);
return map;
}
public static <V> V firstOrDefault(List<V> list) {
return list.size() > 0? list.get(0) : null ;
}
public static class ValueHolder<V> {
public ValueHolder() {
}
public ValueHolder(V v) {
this.v = v;
}
public V v;
public static <T> ValueHolder<T> initialize(T v) {
return new ValueHolder<T>(v);
}
}
public static <K, V> boolean tryGetValue(Map<K, V> dictionary, K key, ValueHolder<V> holder) {
holder.v = dictionary.get(key);
return holder.v != null;
}
public static <K, V> boolean tryRemove(Map<K, V> dictionary, K key, ValueHolder<V> holder) {
holder.v = dictionary.remove(key);
return holder.v != null;
}
public static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) {
if (StringUtils.isEmpty(itemResponseBodyAsString)) {
return null;
}
try {
return getSimpleObjectMapper().readValue(itemResponseBodyAsString, itemClassType);
} catch (IOException e) {
throw new IllegalStateException(
String.format("Failed to parse string [%s] to POJO.", itemResponseBodyAsString, e));
}
}
public static <T> T parse(byte[] item, Class<T> itemClassType) {
if (Utils.isEmpty(item)) {
return null;
}
try {
return getSimpleObjectMapper().readValue(item, itemClassType);
} catch (IOException e) {
throw new IllegalStateException(
String.format("Failed to parse byte-array %s to POJO.", Arrays.toString(item)), e);
}
}
public static ByteBuffer serializeJsonToByteBuffer(ObjectMapper objectMapper, Object object) {
try {
ByteBufferOutputStream byteBufferOutputStream = new ByteBufferOutputStream(ONE_KB);
objectMapper.writeValue(byteBufferOutputStream, object);
return byteBufferOutputStream.asByteBuffer();
} catch (IOException e) {
throw new IllegalArgumentException("Failed to serialize the object into json", e);
}
}
public static boolean isEmpty(byte[] bytes) {
return bytes == null || bytes.length == 0;
}
public static String utf8StringFromOrNull(byte[] bytes) {
if (bytes == null) {
return null;
}
return new String(bytes, StandardCharsets.UTF_8);
}
public static void setContinuationTokenAndMaxItemCount(CosmosPagedFluxOptions pagedFluxOptions, CosmosQueryRequestOptions cosmosQueryRequestOptions) {
if (pagedFluxOptions == null) {
return;
}
if (pagedFluxOptions.getRequestContinuation() != null) {
ModelBridgeInternal.setQueryRequestOptionsContinuationToken(cosmosQueryRequestOptions, pagedFluxOptions.getRequestContinuation());
}
if (pagedFluxOptions.getMaxItemCount() != null) {
ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(cosmosQueryRequestOptions, pagedFluxOptions.getMaxItemCount());
}
}
static String escapeNonAscii(String partitionKeyJson) {
StringBuilder sb = null;
for (int i = 0; i < partitionKeyJson.length(); i++) {
int val = partitionKeyJson.charAt(i);
if (val > 127) {
if (sb == null) {
sb = new StringBuilder(partitionKeyJson.length());
sb.append(partitionKeyJson, 0, i);
}
sb.append("\\u").append(String.format("%04X", val));
} else {
if (sb != null) {
sb.append(partitionKeyJson.charAt(i));
}
}
}
if (sb == null) {
return partitionKeyJson;
} else {
return sb.toString();
}
}
static byte[] toByteArray(ByteBuf buf) {
byte[] bytes = new byte[buf.readableBytes()];
buf.readBytes(bytes);
return bytes;
}
public static String toJson(ObjectMapper mapper, ObjectNode object) {
try {
return mapper.writeValueAsString(object);
} catch (JsonProcessingException e) {
throw new IllegalStateException("Unable to convert JSON to STRING", e);
}
}
public static byte[] serializeObjectToByteArray(Object obj) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
ObjectOutputStream os = new ObjectOutputStream(out);
os.writeObject(obj);
return out.toByteArray();
}
} | class Utils {
private final static Logger logger = LoggerFactory.getLogger(Utils.class);
private static final int ONE_KB = 1024;
private static final ZoneId GMT_ZONE_ID = ZoneId.of("GMT");
public static final Base64.Encoder Base64Encoder = Base64.getEncoder();
public static final Base64.Decoder Base64Decoder = Base64.getDecoder();
private static final ObjectMapper simpleObjectMapper = new ObjectMapper();
private static final TimeBasedGenerator TIME_BASED_GENERATOR =
Generators.timeBasedGenerator(EthernetAddress.constructMulticastAddress());
private static final DateTimeFormatter RFC_1123_DATE_TIME = DateTimeFormatter.ofPattern("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
static {
Utils.simpleObjectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
Utils.simpleObjectMapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
Utils.simpleObjectMapper.configure(JsonParser.Feature.ALLOW_TRAILING_COMMA, true);
Utils.simpleObjectMapper.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
Utils.simpleObjectMapper.configure(DeserializationFeature.ACCEPT_FLOAT_AS_INT, false);
Utils.simpleObjectMapper.registerModule(new AfterburnerModule());
}
public static byte[] getUTF8BytesOrNull(String str) {
if (str == null) {
return null;
}
return str.getBytes(StandardCharsets.UTF_8);
}
public static byte[] getUTF8Bytes(String str) {
return str.getBytes(StandardCharsets.UTF_8);
}
public static String encodeBase64String(byte[] binaryData) {
String encodedString = Base64Encoder.encodeToString(binaryData);
if (encodedString.endsWith("\r\n")) {
encodedString = encodedString.substring(0, encodedString.length() - 2);
}
return encodedString;
}
/**
* Checks whether the specified link is Name based or not
*
* @param link the link to analyze.
* @return true or false
*/
public static boolean isNameBased(String link) {
if (StringUtils.isEmpty(link)) {
return false;
}
if (link.startsWith("/") && link.length() > 1) {
link = link.substring(1);
}
String[] parts = StringUtils.split(link, "/");
if (parts.length == 0 || StringUtils.isEmpty(parts[0])
|| !parts[0].equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) {
return false;
}
if (parts.length < 2 || StringUtils.isEmpty(parts[1])) {
return false;
}
String databaseID = parts[1];
if (databaseID.length() != 8) {
return true;
}
byte[] buffer = ResourceId.fromBase64String(databaseID);
if (buffer.length != 4) {
return true;
}
return false;
}
/**
* Checks whether the specified link is a Database Self Link or a Database
* ID based link
*
* @param link the link to analyze.
* @return true or false
*/
public static boolean isDatabaseLink(String link) {
if (StringUtils.isEmpty(link)) {
return false;
}
link = trimBeginningAndEndingSlashes(link);
String[] parts = StringUtils.split(link, "/");
if (parts.length != 2) {
return false;
}
if (StringUtils.isEmpty(parts[0]) || !parts[0].equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) {
return false;
}
if (StringUtils.isEmpty(parts[1])) {
return false;
}
return true;
}
/**
* Joins the specified paths by appropriately padding them with '/'
*
* @param path1 the first path segment to join.
* @param path2 the second path segment to join.
* @return the concatenated path with '/'
*/
public static String joinPath(String path1, String path2) {
path1 = trimBeginningAndEndingSlashes(path1);
String result = "/" + path1 + "/";
if (!StringUtils.isEmpty(path2)) {
path2 = trimBeginningAndEndingSlashes(path2);
result += path2 + "/";
}
return result;
}
/**
* Trims the beginning and ending '/' from the given path
*
* @param path the path to trim for beginning and ending slashes
* @return the path without beginning and ending '/'
*/
public static String trimBeginningAndEndingSlashes(String path) {
if(path == null) {
return null;
}
if (path.startsWith("/")) {
path = path.substring(1);
}
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
return path;
}
public static Map<String, String> paramEncode(Map<String, String> queryParams) {
HashMap<String, String> map = new HashMap<>();
for(Map.Entry<String, String> paramEntry: queryParams.entrySet()) {
try {
map.put(paramEntry.getKey(), URLEncoder.encode(paramEntry.getValue(), "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException(e);
}
}
return map;
}
public static String createQuery(Map<String, String> queryParameters) {
if (queryParameters == null)
return "";
StringBuilder queryString = new StringBuilder();
for (Map.Entry<String, String> nameValuePair : queryParameters.entrySet()) {
String key = nameValuePair.getKey();
String value = nameValuePair.getValue();
if (key != null && !key.isEmpty()) {
if (queryString.length() > 0) {
queryString.append(RuntimeConstants.Separators.Query[1]);
}
queryString.append(key);
if (value != null) {
queryString.append(RuntimeConstants.Separators.Query[2]);
queryString.append(value);
}
}
}
return queryString.toString();
}
public static URI setQuery(String urlString, String query) {
if (urlString == null)
throw new IllegalStateException("urlString parameter can't be null.");
query = Utils.removeLeadingQuestionMark(query);
try {
if (query != null && !query.isEmpty()) {
return new URI(Utils.addTrailingSlash(urlString) + RuntimeConstants.Separators.Query[0] + query);
} else {
return new URI(Utils.addTrailingSlash(urlString));
}
} catch (URISyntaxException e) {
throw new IllegalStateException("Uri is invalid: ", e);
}
}
/**
* Given the full path to a resource, extract the collection path.
*
* @param resourceFullName the full path to the resource.
* @return the path of the collection in which the resource is.
*/
public static String getCollectionName(String resourceFullName) {
if (resourceFullName != null) {
resourceFullName = Utils.trimBeginningAndEndingSlashes(resourceFullName);
int slashCount = 0;
for (int i = 0; i < resourceFullName.length(); i++) {
if (resourceFullName.charAt(i) == '/') {
slashCount++;
if (slashCount == 4) {
return resourceFullName.substring(0, i);
}
}
}
}
return resourceFullName;
}
public static <T> int getCollectionSize(Collection<T> collection) {
if (collection == null) {
return 0;
}
return collection.size();
}
public static Boolean isCollectionPartitioned(DocumentCollection collection) {
if (collection == null) {
throw new IllegalArgumentException("collection");
}
return collection.getPartitionKey() != null
&& collection.getPartitionKey().getPaths() != null
&& collection.getPartitionKey().getPaths().size() > 0;
}
public static boolean isCollectionChild(ResourceType type) {
return type == ResourceType.Document || type == ResourceType.Attachment || type == ResourceType.Conflict
|| type == ResourceType.StoredProcedure || type == ResourceType.Trigger || type == ResourceType.UserDefinedFunction;
}
// True for operation types that mutate state (including stored-procedure execution).
public static boolean isWriteOperation(OperationType operationType) {
    return operationType == OperationType.Create || operationType == OperationType.Upsert || operationType == OperationType.Delete || operationType == OperationType.Replace
        || operationType == OperationType.ExecuteJavaScript;
}
// True for operations addressed to a feed (collection) URI rather than a single resource.
// NOTE(review): Create/Upsert are included — presumably because they POST to the feed URI;
// confirm against the request-routing code.
public static boolean isFeedRequest(OperationType requestOperationType) {
    return requestOperationType == OperationType.Create ||
        requestOperationType == OperationType.Upsert ||
        requestOperationType == OperationType.ReadFeed ||
        requestOperationType == OperationType.Query ||
        requestOperationType == OperationType.SqlQuery ||
        requestOperationType == OperationType.HeadFeed;
}
// Ensures the path ends with the URL separator; a null/empty path becomes the separator itself.
private static String addTrailingSlash(String path) {
    if (path == null || path.isEmpty()) {
        return new String(RuntimeConstants.Separators.Url);
    }
    char last = path.charAt(path.length() - 1);
    return last == RuntimeConstants.Separators.Url[0]
        ? path
        : path + RuntimeConstants.Separators.Url[0];
}
// Strips a single leading query separator ('?') from the path, if present.
private static String removeLeadingQuestionMark(String path) {
    if (path != null
            && !path.isEmpty()
            && path.charAt(0) == RuntimeConstants.Separators.Query[0]) {
        return path.substring(1);
    }
    return path;
}
/**
 * Checks whether {@code desiredConsistency} can be served by an account whose default
 * consistency is {@code backendConsistency}: a weaker (or equal) level is always valid,
 * a stronger one never is. STRONG permits every level; BOUNDED_STALENESS permits all but
 * STRONG; SESSION/EVENTUAL/CONSISTENT_PREFIX permit only those three.
 *
 * @throws IllegalArgumentException for an unrecognized backend consistency level.
 */
public static boolean isValidConsistency(ConsistencyLevel backendConsistency,
                                         ConsistencyLevel desiredConsistency) {
    switch (backendConsistency) {
        case STRONG:
            return desiredConsistency == ConsistencyLevel.STRONG ||
                desiredConsistency == ConsistencyLevel.BOUNDED_STALENESS ||
                desiredConsistency == ConsistencyLevel.SESSION ||
                desiredConsistency == ConsistencyLevel.EVENTUAL ||
                desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
        case BOUNDED_STALENESS:
            return desiredConsistency == ConsistencyLevel.BOUNDED_STALENESS ||
                desiredConsistency == ConsistencyLevel.SESSION ||
                desiredConsistency == ConsistencyLevel.EVENTUAL ||
                desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
        case SESSION:
        case EVENTUAL:
        case CONSISTENT_PREFIX:
            return desiredConsistency == ConsistencyLevel.SESSION ||
                desiredConsistency == ConsistencyLevel.EVENTUAL ||
                desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
        default:
            throw new IllegalArgumentException("backendConsistency");
    }
}
// Returns the default user-agent string built from the SDK's own name and version constants.
public static String getUserAgent() {
    return getUserAgent(HttpConstants.Versions.SDK_NAME, HttpConstants.Versions.SDK_VERSION);
}
/**
 * Builds a user-agent string of the form "os/osVersion JRE/javaVersion sdkName/sdkVersion".
 * Whitespace is stripped from the OS name; an unknown OS is reported as "Unknown".
 *
 * @param sdkName the SDK name segment.
 * @param sdkVersion the SDK version segment.
 * @return the formatted user-agent string.
 */
public static String getUserAgent(String sdkName, String sdkVersion) {
    String os = System.getProperty("os.name");
    os = (os == null ? "Unknown" : os).replaceAll("\\s", "");
    return String.format("%s/%s JRE/%s %s/%s",
        os,
        System.getProperty("os.version"),
        System.getProperty("java.version"),
        sdkName,
        sdkVersion);
}
// Returns the shared, statically configured Jackson mapper used for plain POJO (de)serialization.
public static ObjectMapper getSimpleObjectMapper() {
    return Utils.simpleObjectMapper;
}
/**
 * Returns the current time in RFC 1123 format, e.g.
 * {@code Fri, 01 Dec 2017 19:22:30 GMT}, always in the GMT zone.
 *
 * @return the formatted current timestamp as a String.
 */
public static String nowAsRFC1123() {
    ZonedDateTime now = ZonedDateTime.now(GMT_ZONE_ID);
    return Utils.RFC_1123_DATE_TIME.format(now);
}
// Generates a time-based UUID from the shared generator (not a random version-4 UUID,
// despite the method name).
public static UUID randomUUID() {
    return TIME_BASED_GENERATOR.generate();
}
// Formats the given instant in RFC 1123, converted to GMT first so the zone suffix is stable.
public static String zonedDateTimeAsUTCRFC1123(OffsetDateTime offsetDateTime){
    return Utils.RFC_1123_DATE_TIME.format(offsetDateTime.atZoneSameInstant(GMT_ZONE_ID));
}
// Unboxes val, substituting defaultValue when val is null.
public static int getValueOrDefault(Integer val, int defaultValue) {
    if (val == null) {
        return defaultValue;
    }
    return val;
}
// Throws IllegalArgumentException built from argumentName/message when value is false.
public static void checkStateOrThrow(boolean value, String argumentName, String message) throws IllegalArgumentException {
    IllegalArgumentException t = checkStateOrReturnException(value, argumentName, message);
    if (t != null) {
        throw t;
    }
}
// Throws NullPointerException built from argumentName/message when val is null.
public static void checkNotNullOrThrow(Object val, String argumentName, String message) throws NullPointerException {
    NullPointerException t = checkNotNullOrReturnException(val, argumentName, message);
    if (t != null) {
        throw t;
    }
}
/**
 * Validates {@code value} and, on failure, throws an {@link IllegalArgumentException}
 * whose message is built from the formatted template.
 *
 * @param value the condition that must hold.
 * @param argumentName the name of the argument being validated.
 * @param messageTemplate a {@link String#format} template for the failure message.
 * @param messageTemplateParams parameters for the template.
 * @throws IllegalArgumentException if {@code value} is false.
 */
public static void checkStateOrThrow(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) throws IllegalArgumentException {
    // BUG FIX: argumentName was previously passed where the message template belongs,
    // so the formatted failure message was silently discarded.
    IllegalArgumentException t = checkStateOrReturnException(value, argumentName, messageTemplate, messageTemplateParams);
    if (t != null) {
        throw t;
    }
}
// Returns an IllegalArgumentException describing the failed check, or null when the check passes.
public static IllegalArgumentException checkStateOrReturnException(boolean value, String argumentName, String message) {
    return value
        ? null
        : new IllegalArgumentException(String.format("argumentName: %s, message: %s", argumentName, message));
}
// Returns an IllegalArgumentException whose message is the formatted template,
// or null when the check passes.
public static IllegalArgumentException checkStateOrReturnException(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) {
    if (value) {
        return null;
    }
    String detail = String.format(messageTemplate, messageTemplateParams);
    return new IllegalArgumentException(String.format("argumentName: %s, message: %s", argumentName, detail));
}
// Returns a NullPointerException describing the null argument, or null when val is non-null.
private static NullPointerException checkNotNullOrReturnException(Object val, String argumentName, String messageTemplate, Object... messageTemplateParams) {
    if (val != null) {
        return null;
    }
    String detail = String.format(messageTemplate, messageTemplateParams);
    return new NullPointerException(String.format("argumentName: %s, message: %s", argumentName, detail));
}
// Returns a BadRequestException describing the failed request check, or null when it passes.
public static BadRequestException checkRequestOrReturnException(boolean value, String argumentName, String message) {
    if (value) {
        return null;
    }
    return new BadRequestException(String.format("argumentName: %s, message: %s", argumentName, message));
}
// Template overload: returns a BadRequestException whose message is the formatted template,
// or null when the check passes.
public static BadRequestException checkRequestOrReturnException(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) {
    if (value) {
        return null;
    }
    return new BadRequestException(String.format("argumentName: %s, message: %s", argumentName, String.format(messageTemplate, messageTemplateParams)));
}
/**
 * Safe cast: returns {@code i} as type {@code O} when it is an instance of {@code klass},
 * otherwise null. A null input yields null ({@code isInstance(null)} is false).
 */
@SuppressWarnings("unchecked")
public static <O, I> O as(I i, Class<O> klass) {
    return klass.isInstance(i) ? (O) i : null;
}
/**
 * Returns a shared immutable empty list.
 *
 * @return an immutable empty list.
 */
public static <V> List<V> immutableListOf() {
    // Collections.emptyList() is type-safe; the raw EMPTY_LIST constant needed
    // an @SuppressWarnings("unchecked") on the method.
    return Collections.emptyList();
}
// Returns an immutable list containing exactly the given element.
public static <V> List<V> immutableListOf(V v1) {
    return Collections.singletonList(v1);
}
// Returns a shared immutable empty map.
public static <K, V> Map<K, V>immutableMapOf() {
    return Collections.emptyMap();
}
// Returns an immutable map containing exactly the given key/value pair.
public static <K, V> Map<K, V> immutableMapOf(K k1, V v1) {
    return Collections.singletonMap(k1, v1);
}
// Returns the first element of the list, or null when the list is empty.
// The list itself must be non-null (a null list throws NPE, as before).
public static <V> V firstOrDefault(List<V> list) {
    if (list.isEmpty()) {
        return null;
    }
    return list.get(0);
}
/**
 * Mutable single-value holder used as an "out parameter" by tryGetValue/tryRemove.
 */
public static class ValueHolder<V> {
    public ValueHolder() {
    }
    public ValueHolder(V v) {
        this.v = v;
    }
    // The held value; null means "absent" in the try* idiom.
    public V v;
    // Convenience factory wrapping an initial value.
    public static <T> ValueHolder<T> initialize(T v) {
        return new ValueHolder<T>(v);
    }
}
// Looks up key and stores the mapped value in holder; returns true iff a non-null value
// was found. NOTE: a key explicitly mapped to null is indistinguishable from an absent key.
public static <K, V> boolean tryGetValue(Map<K, V> dictionary, K key, ValueHolder<V> holder) {
    holder.v = dictionary.get(key);
    return holder.v != null;
}
// Removes key and stores the removed value in holder; returns true iff a non-null value
// was removed. NOTE: a key explicitly mapped to null reports false even though it was removed.
public static <K, V> boolean tryRemove(Map<K, V> dictionary, K key, ValueHolder<V> holder) {
    holder.v = dictionary.remove(key);
    return holder.v != null;
}
/**
 * Deserializes the given JSON string into an instance of {@code itemClassType}.
 *
 * @param itemResponseBodyAsString the JSON payload; may be null or empty.
 * @param itemClassType the target POJO type.
 * @return the parsed instance, or null when the input is null/empty.
 * @throws IllegalStateException if the payload cannot be parsed; carries the IOException as cause.
 */
public static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) {
    if (StringUtils.isEmpty(itemResponseBodyAsString)) {
        return null;
    }
    try {
        return getSimpleObjectMapper().readValue(itemResponseBodyAsString, itemClassType);
    } catch (IOException e) {
        // BUG FIX: 'e' was previously passed as a surplus String.format argument
        // (where it was silently ignored), losing the cause; pass it to the
        // exception constructor instead — matching the byte[] overload.
        throw new IllegalStateException(
            String.format("Failed to parse string [%s] to POJO.", itemResponseBodyAsString), e);
    }
}
/**
 * Deserializes the given JSON bytes into an instance of {@code itemClassType}.
 *
 * @param item the JSON payload; may be null or empty.
 * @param itemClassType the target POJO type.
 * @return the parsed instance, or null when the input is null/empty.
 * @throws IllegalStateException if the payload cannot be parsed; carries the IOException as cause.
 */
public static <T> T parse(byte[] item, Class<T> itemClassType) {
    if (Utils.isEmpty(item)) {
        return null;
    }
    try {
        return getSimpleObjectMapper().readValue(item, itemClassType);
    } catch (IOException e) {
        throw new IllegalStateException(
            String.format("Failed to parse byte-array %s to POJO.", Arrays.toString(item)), e);
    }
}
/**
 * Serializes the object to JSON into a ByteBuffer, starting from a 1 KB buffer
 * (ByteBufferOutputStream presumably grows as needed — confirm against its implementation).
 *
 * @throws IllegalArgumentException if Jackson fails to serialize the object.
 */
public static ByteBuffer serializeJsonToByteBuffer(ObjectMapper objectMapper, Object object) {
    try {
        ByteBufferOutputStream byteBufferOutputStream = new ByteBufferOutputStream(ONE_KB);
        objectMapper.writeValue(byteBufferOutputStream, object);
        return byteBufferOutputStream.asByteBuffer();
    } catch (IOException e) {
        throw new IllegalArgumentException("Failed to serialize the object into json", e);
    }
}
// True when the byte array is null or has no elements.
public static boolean isEmpty(byte[] bytes) {
    if (bytes == null) {
        return true;
    }
    return bytes.length == 0;
}
// Decodes the bytes as UTF-8, propagating null.
public static String utf8StringFromOrNull(byte[] bytes) {
    return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
}
// Copies continuation token and max-item-count from the paged-flux options onto the
// query request options, skipping fields that are unset; a null pagedFluxOptions is a no-op.
public static void setContinuationTokenAndMaxItemCount(CosmosPagedFluxOptions pagedFluxOptions, CosmosQueryRequestOptions cosmosQueryRequestOptions) {
    if (pagedFluxOptions == null) {
        return;
    }
    if (pagedFluxOptions.getRequestContinuation() != null) {
        ModelBridgeInternal.setQueryRequestOptionsContinuationToken(cosmosQueryRequestOptions, pagedFluxOptions.getRequestContinuation());
    }
    if (pagedFluxOptions.getMaxItemCount() != null) {
        ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(cosmosQueryRequestOptions, pagedFluxOptions.getMaxItemCount());
    }
}
/**
 * Escapes every character above U+007F as an uppercase "\\uXXXX" sequence.
 * Returns the input instance unchanged (no allocation) when it is pure ASCII.
 */
static String escapeNonAscii(String partitionKeyJson) {
    StringBuilder escaped = null;
    for (int i = 0; i < partitionKeyJson.length(); i++) {
        char c = partitionKeyJson.charAt(i);
        if (c > 127) {
            if (escaped == null) {
                // First non-ASCII character: lazily copy the ASCII prefix seen so far.
                escaped = new StringBuilder(partitionKeyJson.length())
                    .append(partitionKeyJson, 0, i);
            }
            escaped.append(String.format("\\u%04X", (int) c));
        } else if (escaped != null) {
            escaped.append(c);
        }
    }
    return escaped == null ? partitionKeyJson : escaped.toString();
}
// Copies all readable bytes of the ByteBuf into a fresh array.
// NOTE(review): readBytes consumes the buffer (advances readerIndex) — callers must not
// expect the buffer to remain readable afterwards.
static byte[] toByteArray(ByteBuf buf) {
    byte[] bytes = new byte[buf.readableBytes()];
    buf.readBytes(bytes);
    return bytes;
}
// Serializes the JSON node to a String with the given mapper, wrapping Jackson's
// checked exception in an unchecked IllegalStateException.
public static String toJson(ObjectMapper mapper, ObjectNode object) {
    try {
        return mapper.writeValueAsString(object);
    } catch (JsonProcessingException e) {
        throw new IllegalStateException("Unable to convert JSON to STRING", e);
    }
}
/**
 * Serializes {@code obj} with Java object serialization and returns the raw bytes.
 *
 * @param obj the object to serialize; must be {@link java.io.Serializable}.
 * @return the serialized bytes.
 * @throws IOException if serialization fails.
 */
public static byte[] serializeObjectToByteArray(Object obj) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // FIX: try-with-resources flushes and closes the ObjectOutputStream before the
    // byte array is read; previously the stream was never closed, so data still
    // buffered in the stream could be missing from the returned array.
    try (ObjectOutputStream os = new ObjectOutputStream(out)) {
        os.writeObject(obj);
    }
    return out.toByteArray();
}
} |
Yes, we can do that too. Since this is an implementation detail, we will change it in the next PR. | public static String decodeAsUTF8String(String inputString) {
if (inputString == null || inputString.isEmpty()) {
return inputString;
}
try {
return URLDecoder.decode(inputString, StandardCharsets.UTF_8.toString());
} catch (UnsupportedEncodingException e) {
logger.warn("Error while decoding input string", e);
return inputString;
}
} | if (inputString == null || inputString.isEmpty()) { | public static String decodeAsUTF8String(String inputString) {
if (inputString == null || inputString.isEmpty()) {
return inputString;
}
try {
return URLDecoder.decode(inputString, StandardCharsets.UTF_8.toString());
} catch (UnsupportedEncodingException e) {
logger.warn("Error while decoding input string", e);
return inputString;
}
} | class Utils {
private final static Logger logger = LoggerFactory.getLogger(Utils.class);
private static final int ONE_KB = 1024;
private static final ZoneId GMT_ZONE_ID = ZoneId.of("GMT");
public static final Base64.Encoder Base64Encoder = Base64.getEncoder();
public static final Base64.Decoder Base64Decoder = Base64.getDecoder();
private static final ObjectMapper simpleObjectMapper = new ObjectMapper();
private static final TimeBasedGenerator TIME_BASED_GENERATOR =
Generators.timeBasedGenerator(EthernetAddress.constructMulticastAddress());
private static final DateTimeFormatter RFC_1123_DATE_TIME = DateTimeFormatter.ofPattern("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
static {
Utils.simpleObjectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
Utils.simpleObjectMapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
Utils.simpleObjectMapper.configure(JsonParser.Feature.ALLOW_TRAILING_COMMA, true);
Utils.simpleObjectMapper.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
Utils.simpleObjectMapper.configure(DeserializationFeature.ACCEPT_FLOAT_AS_INT, false);
Utils.simpleObjectMapper.registerModule(new AfterburnerModule());
}
public static byte[] getUTF8BytesOrNull(String str) {
if (str == null) {
return null;
}
return str.getBytes(StandardCharsets.UTF_8);
}
public static byte[] getUTF8Bytes(String str) {
return str.getBytes(StandardCharsets.UTF_8);
}
public static String encodeBase64String(byte[] binaryData) {
String encodedString = Base64Encoder.encodeToString(binaryData);
if (encodedString.endsWith("\r\n")) {
encodedString = encodedString.substring(0, encodedString.length() - 2);
}
return encodedString;
}
/**
* Checks whether the specified link is Name based or not
*
* @param link the link to analyze.
* @return true or false
*/
public static boolean isNameBased(String link) {
if (StringUtils.isEmpty(link)) {
return false;
}
if (link.startsWith("/") && link.length() > 1) {
link = link.substring(1);
}
String[] parts = StringUtils.split(link, "/");
if (parts.length == 0 || StringUtils.isEmpty(parts[0])
|| !parts[0].equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) {
return false;
}
if (parts.length < 2 || StringUtils.isEmpty(parts[1])) {
return false;
}
String databaseID = parts[1];
if (databaseID.length() != 8) {
return true;
}
byte[] buffer = ResourceId.fromBase64String(databaseID);
if (buffer.length != 4) {
return true;
}
return false;
}
/**
* Checks whether the specified link is a Database Self Link or a Database
* ID based link
*
* @param link the link to analyze.
* @return true or false
*/
public static boolean isDatabaseLink(String link) {
if (StringUtils.isEmpty(link)) {
return false;
}
link = trimBeginningAndEndingSlashes(link);
String[] parts = StringUtils.split(link, "/");
if (parts.length != 2) {
return false;
}
if (StringUtils.isEmpty(parts[0]) || !parts[0].equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) {
return false;
}
if (StringUtils.isEmpty(parts[1])) {
return false;
}
return true;
}
/**
* Joins the specified paths by appropriately padding them with '/'
*
* @param path1 the first path segment to join.
* @param path2 the second path segment to join.
* @return the concatenated path with '/'
*/
public static String joinPath(String path1, String path2) {
path1 = trimBeginningAndEndingSlashes(path1);
String result = "/" + path1 + "/";
if (!StringUtils.isEmpty(path2)) {
path2 = trimBeginningAndEndingSlashes(path2);
result += path2 + "/";
}
return result;
}
/**
* Trims the beginning and ending '/' from the given path
*
* @param path the path to trim for beginning and ending slashes
* @return the path without beginning and ending '/'
*/
public static String trimBeginningAndEndingSlashes(String path) {
if(path == null) {
return null;
}
if (path.startsWith("/")) {
path = path.substring(1);
}
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
return path;
}
public static Map<String, String> paramEncode(Map<String, String> queryParams) {
HashMap<String, String> map = new HashMap<>();
for(Map.Entry<String, String> paramEntry: queryParams.entrySet()) {
try {
map.put(paramEntry.getKey(), URLEncoder.encode(paramEntry.getValue(), "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException(e);
}
}
return map;
}
public static String createQuery(Map<String, String> queryParameters) {
if (queryParameters == null)
return "";
StringBuilder queryString = new StringBuilder();
for (Map.Entry<String, String> nameValuePair : queryParameters.entrySet()) {
String key = nameValuePair.getKey();
String value = nameValuePair.getValue();
if (key != null && !key.isEmpty()) {
if (queryString.length() > 0) {
queryString.append(RuntimeConstants.Separators.Query[1]);
}
queryString.append(key);
if (value != null) {
queryString.append(RuntimeConstants.Separators.Query[2]);
queryString.append(value);
}
}
}
return queryString.toString();
}
public static URI setQuery(String urlString, String query) {
if (urlString == null)
throw new IllegalStateException("urlString parameter can't be null.");
query = Utils.removeLeadingQuestionMark(query);
try {
if (query != null && !query.isEmpty()) {
return new URI(Utils.addTrailingSlash(urlString) + RuntimeConstants.Separators.Query[0] + query);
} else {
return new URI(Utils.addTrailingSlash(urlString));
}
} catch (URISyntaxException e) {
throw new IllegalStateException("Uri is invalid: ", e);
}
}
/**
* Given the full path to a resource, extract the collection path.
*
* @param resourceFullName the full path to the resource.
* @return the path of the collection in which the resource is.
*/
public static String getCollectionName(String resourceFullName) {
if (resourceFullName != null) {
resourceFullName = Utils.trimBeginningAndEndingSlashes(resourceFullName);
int slashCount = 0;
for (int i = 0; i < resourceFullName.length(); i++) {
if (resourceFullName.charAt(i) == '/') {
slashCount++;
if (slashCount == 4) {
return resourceFullName.substring(0, i);
}
}
}
}
return resourceFullName;
}
public static <T> int getCollectionSize(Collection<T> collection) {
if (collection == null) {
return 0;
}
return collection.size();
}
public static Boolean isCollectionPartitioned(DocumentCollection collection) {
if (collection == null) {
throw new IllegalArgumentException("collection");
}
return collection.getPartitionKey() != null
&& collection.getPartitionKey().getPaths() != null
&& collection.getPartitionKey().getPaths().size() > 0;
}
public static boolean isCollectionChild(ResourceType type) {
return type == ResourceType.Document || type == ResourceType.Attachment || type == ResourceType.Conflict
|| type == ResourceType.StoredProcedure || type == ResourceType.Trigger || type == ResourceType.UserDefinedFunction;
}
public static boolean isWriteOperation(OperationType operationType) {
return operationType == OperationType.Create || operationType == OperationType.Upsert || operationType == OperationType.Delete || operationType == OperationType.Replace
|| operationType == OperationType.ExecuteJavaScript;
}
public static boolean isFeedRequest(OperationType requestOperationType) {
return requestOperationType == OperationType.Create ||
requestOperationType == OperationType.Upsert ||
requestOperationType == OperationType.ReadFeed ||
requestOperationType == OperationType.Query ||
requestOperationType == OperationType.SqlQuery ||
requestOperationType == OperationType.HeadFeed;
}
private static String addTrailingSlash(String path) {
if (path == null || path.isEmpty())
path = new String(RuntimeConstants.Separators.Url);
else if (path.charAt(path.length() - 1) != RuntimeConstants.Separators.Url[0])
path = path + RuntimeConstants.Separators.Url[0];
return path;
}
private static String removeLeadingQuestionMark(String path) {
if (path == null || path.isEmpty())
return path;
if (path.charAt(0) == RuntimeConstants.Separators.Query[0])
return path.substring(1);
return path;
}
public static boolean isValidConsistency(ConsistencyLevel backendConsistency,
ConsistencyLevel desiredConsistency) {
switch (backendConsistency) {
case STRONG:
return desiredConsistency == ConsistencyLevel.STRONG ||
desiredConsistency == ConsistencyLevel.BOUNDED_STALENESS ||
desiredConsistency == ConsistencyLevel.SESSION ||
desiredConsistency == ConsistencyLevel.EVENTUAL ||
desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
case BOUNDED_STALENESS:
return desiredConsistency == ConsistencyLevel.BOUNDED_STALENESS ||
desiredConsistency == ConsistencyLevel.SESSION ||
desiredConsistency == ConsistencyLevel.EVENTUAL ||
desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
case SESSION:
case EVENTUAL:
case CONSISTENT_PREFIX:
return desiredConsistency == ConsistencyLevel.SESSION ||
desiredConsistency == ConsistencyLevel.EVENTUAL ||
desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
default:
throw new IllegalArgumentException("backendConsistency");
}
}
public static String getUserAgent() {
return getUserAgent(HttpConstants.Versions.SDK_NAME, HttpConstants.Versions.SDK_VERSION);
}
public static String getUserAgent(String sdkName, String sdkVersion) {
String osName = System.getProperty("os.name");
if (osName == null) {
osName = "Unknown";
}
osName = osName.replaceAll("\\s", "");
String userAgent = String.format("%s/%s JRE/%s %s/%s",
osName,
System.getProperty("os.version"),
System.getProperty("java.version"),
sdkName,
sdkVersion);
return userAgent;
}
public static ObjectMapper getSimpleObjectMapper() {
return Utils.simpleObjectMapper;
}
/**
* Returns Current Time in RFC 1123 format, e.g,
* Fri, 01 Dec 2017 19:22:30 GMT.
*
* @return an instance of STRING
*/
public static String nowAsRFC1123() {
ZonedDateTime now = ZonedDateTime.now(GMT_ZONE_ID);
return Utils.RFC_1123_DATE_TIME.format(now);
}
public static UUID randomUUID() {
return TIME_BASED_GENERATOR.generate();
}
public static String zonedDateTimeAsUTCRFC1123(OffsetDateTime offsetDateTime){
return Utils.RFC_1123_DATE_TIME.format(offsetDateTime.atZoneSameInstant(GMT_ZONE_ID));
}
public static int getValueOrDefault(Integer val, int defaultValue) {
return val != null ? val.intValue() : defaultValue;
}
public static void checkStateOrThrow(boolean value, String argumentName, String message) throws IllegalArgumentException {
IllegalArgumentException t = checkStateOrReturnException(value, argumentName, message);
if (t != null) {
throw t;
}
}
public static void checkNotNullOrThrow(Object val, String argumentName, String message) throws NullPointerException {
NullPointerException t = checkNotNullOrReturnException(val, argumentName, message);
if (t != null) {
throw t;
}
}
/**
 * Validates {@code value} and, on failure, throws an {@link IllegalArgumentException}
 * whose message is built from the formatted template.
 *
 * @param value the condition that must hold.
 * @param argumentName the name of the argument being validated.
 * @param messageTemplate a {@link String#format} template for the failure message.
 * @param messageTemplateParams parameters for the template.
 * @throws IllegalArgumentException if {@code value} is false.
 */
public static void checkStateOrThrow(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) throws IllegalArgumentException {
    // BUG FIX: argumentName was previously passed where the message template belongs,
    // so the formatted failure message was silently discarded.
    IllegalArgumentException t = checkStateOrReturnException(value, argumentName, messageTemplate, messageTemplateParams);
    if (t != null) {
        throw t;
    }
}
public static IllegalArgumentException checkStateOrReturnException(boolean value, String argumentName, String message) {
if (value) {
return null;
}
return new IllegalArgumentException(String.format("argumentName: %s, message: %s", argumentName, message));
}
public static IllegalArgumentException checkStateOrReturnException(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) {
if (value) {
return null;
}
return new IllegalArgumentException(String.format("argumentName: %s, message: %s", argumentName, String.format(messageTemplate, messageTemplateParams)));
}
private static NullPointerException checkNotNullOrReturnException(Object val, String argumentName, String messageTemplate, Object... messageTemplateParams) {
if (val != null) {
return null;
}
return new NullPointerException(String.format("argumentName: %s, message: %s", argumentName, String.format(messageTemplate, messageTemplateParams)));
}
public static BadRequestException checkRequestOrReturnException(boolean value, String argumentName, String message) {
if (value) {
return null;
}
return new BadRequestException(String.format("argumentName: %s, message: %s", argumentName, message));
}
public static BadRequestException checkRequestOrReturnException(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) {
if (value) {
return null;
}
return new BadRequestException(String.format("argumentName: %s, message: %s", argumentName, String.format(messageTemplate, messageTemplateParams)));
}
@SuppressWarnings("unchecked")
public static <O, I> O as(I i, Class<O> klass) {
if (i == null) {
return null;
}
if (klass.isInstance(i)) {
return (O) i;
} else {
return null;
}
}
/**
 * Returns a shared immutable empty list.
 *
 * @return an immutable empty list.
 */
public static <V> List<V> immutableListOf() {
    // Collections.emptyList() is type-safe; the raw EMPTY_LIST constant needed
    // an @SuppressWarnings("unchecked") on the method.
    return Collections.emptyList();
}
public static <V> List<V> immutableListOf(V v1) {
List<V> list = new ArrayList<>();
list.add(v1);
return Collections.unmodifiableList(list);
}
public static <K, V> Map<K, V>immutableMapOf() {
return Collections.emptyMap();
}
public static <K, V> Map<K, V>immutableMapOf(K k1, V v1) {
Map<K, V> map = new HashMap<K ,V>();
map.put(k1, v1);
map = Collections.unmodifiableMap(map);
return map;
}
public static <V> V firstOrDefault(List<V> list) {
return list.size() > 0? list.get(0) : null ;
}
public static class ValueHolder<V> {
public ValueHolder() {
}
public ValueHolder(V v) {
this.v = v;
}
public V v;
public static <T> ValueHolder<T> initialize(T v) {
return new ValueHolder<T>(v);
}
}
public static <K, V> boolean tryGetValue(Map<K, V> dictionary, K key, ValueHolder<V> holder) {
holder.v = dictionary.get(key);
return holder.v != null;
}
public static <K, V> boolean tryRemove(Map<K, V> dictionary, K key, ValueHolder<V> holder) {
holder.v = dictionary.remove(key);
return holder.v != null;
}
/**
 * Deserializes the given JSON string into an instance of {@code itemClassType}.
 *
 * @param itemResponseBodyAsString the JSON payload; may be null or empty.
 * @param itemClassType the target POJO type.
 * @return the parsed instance, or null when the input is null/empty.
 * @throws IllegalStateException if the payload cannot be parsed; carries the IOException as cause.
 */
public static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) {
    if (StringUtils.isEmpty(itemResponseBodyAsString)) {
        return null;
    }
    try {
        return getSimpleObjectMapper().readValue(itemResponseBodyAsString, itemClassType);
    } catch (IOException e) {
        // BUG FIX: 'e' was previously passed as a surplus String.format argument
        // (where it was silently ignored), losing the cause; pass it to the
        // exception constructor instead — matching the byte[] overload.
        throw new IllegalStateException(
            String.format("Failed to parse string [%s] to POJO.", itemResponseBodyAsString), e);
    }
}
public static <T> T parse(byte[] item, Class<T> itemClassType) {
if (Utils.isEmpty(item)) {
return null;
}
try {
return getSimpleObjectMapper().readValue(item, itemClassType);
} catch (IOException e) {
throw new IllegalStateException(
String.format("Failed to parse byte-array %s to POJO.", Arrays.toString(item)), e);
}
}
public static ByteBuffer serializeJsonToByteBuffer(ObjectMapper objectMapper, Object object) {
try {
ByteBufferOutputStream byteBufferOutputStream = new ByteBufferOutputStream(ONE_KB);
objectMapper.writeValue(byteBufferOutputStream, object);
return byteBufferOutputStream.asByteBuffer();
} catch (IOException e) {
throw new IllegalArgumentException("Failed to serialize the object into json", e);
}
}
public static boolean isEmpty(byte[] bytes) {
return bytes == null || bytes.length == 0;
}
public static String utf8StringFromOrNull(byte[] bytes) {
if (bytes == null) {
return null;
}
return new String(bytes, StandardCharsets.UTF_8);
}
public static void setContinuationTokenAndMaxItemCount(CosmosPagedFluxOptions pagedFluxOptions, CosmosQueryRequestOptions cosmosQueryRequestOptions) {
if (pagedFluxOptions == null) {
return;
}
if (pagedFluxOptions.getRequestContinuation() != null) {
ModelBridgeInternal.setQueryRequestOptionsContinuationToken(cosmosQueryRequestOptions, pagedFluxOptions.getRequestContinuation());
}
if (pagedFluxOptions.getMaxItemCount() != null) {
ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(cosmosQueryRequestOptions, pagedFluxOptions.getMaxItemCount());
}
}
static String escapeNonAscii(String partitionKeyJson) {
StringBuilder sb = null;
for (int i = 0; i < partitionKeyJson.length(); i++) {
int val = partitionKeyJson.charAt(i);
if (val > 127) {
if (sb == null) {
sb = new StringBuilder(partitionKeyJson.length());
sb.append(partitionKeyJson, 0, i);
}
sb.append("\\u").append(String.format("%04X", val));
} else {
if (sb != null) {
sb.append(partitionKeyJson.charAt(i));
}
}
}
if (sb == null) {
return partitionKeyJson;
} else {
return sb.toString();
}
}
static byte[] toByteArray(ByteBuf buf) {
byte[] bytes = new byte[buf.readableBytes()];
buf.readBytes(bytes);
return bytes;
}
public static String toJson(ObjectMapper mapper, ObjectNode object) {
try {
return mapper.writeValueAsString(object);
} catch (JsonProcessingException e) {
throw new IllegalStateException("Unable to convert JSON to STRING", e);
}
}
/**
 * Serializes {@code obj} with Java object serialization and returns the raw bytes.
 *
 * @param obj the object to serialize; must be {@link java.io.Serializable}.
 * @return the serialized bytes.
 * @throws IOException if serialization fails.
 */
public static byte[] serializeObjectToByteArray(Object obj) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // FIX: try-with-resources flushes and closes the ObjectOutputStream before the
    // byte array is read; previously the stream was never closed, so data still
    // buffered in the stream could be missing from the returned array.
    try (ObjectOutputStream os = new ObjectOutputStream(out)) {
        os.writeObject(obj);
    }
    return out.toByteArray();
}
} | class Utils {
private final static Logger logger = LoggerFactory.getLogger(Utils.class);
private static final int ONE_KB = 1024;
private static final ZoneId GMT_ZONE_ID = ZoneId.of("GMT");
public static final Base64.Encoder Base64Encoder = Base64.getEncoder();
public static final Base64.Decoder Base64Decoder = Base64.getDecoder();
private static final ObjectMapper simpleObjectMapper = new ObjectMapper();
private static final TimeBasedGenerator TIME_BASED_GENERATOR =
Generators.timeBasedGenerator(EthernetAddress.constructMulticastAddress());
private static final DateTimeFormatter RFC_1123_DATE_TIME = DateTimeFormatter.ofPattern("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
static {
Utils.simpleObjectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
Utils.simpleObjectMapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
Utils.simpleObjectMapper.configure(JsonParser.Feature.ALLOW_TRAILING_COMMA, true);
Utils.simpleObjectMapper.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
Utils.simpleObjectMapper.configure(DeserializationFeature.ACCEPT_FLOAT_AS_INT, false);
Utils.simpleObjectMapper.registerModule(new AfterburnerModule());
}
public static byte[] getUTF8BytesOrNull(String str) {
if (str == null) {
return null;
}
return str.getBytes(StandardCharsets.UTF_8);
}
public static byte[] getUTF8Bytes(String str) {
return str.getBytes(StandardCharsets.UTF_8);
}
public static String encodeBase64String(byte[] binaryData) {
String encodedString = Base64Encoder.encodeToString(binaryData);
if (encodedString.endsWith("\r\n")) {
encodedString = encodedString.substring(0, encodedString.length() - 2);
}
return encodedString;
}
/**
* Checks whether the specified link is Name based or not
*
* @param link the link to analyze.
* @return true or false
*/
public static boolean isNameBased(String link) {
if (StringUtils.isEmpty(link)) {
return false;
}
if (link.startsWith("/") && link.length() > 1) {
link = link.substring(1);
}
String[] parts = StringUtils.split(link, "/");
if (parts.length == 0 || StringUtils.isEmpty(parts[0])
|| !parts[0].equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) {
return false;
}
if (parts.length < 2 || StringUtils.isEmpty(parts[1])) {
return false;
}
String databaseID = parts[1];
if (databaseID.length() != 8) {
return true;
}
byte[] buffer = ResourceId.fromBase64String(databaseID);
if (buffer.length != 4) {
return true;
}
return false;
}
/**
* Checks whether the specified link is a Database Self Link or a Database
* ID based link
*
* @param link the link to analyze.
* @return true or false
*/
public static boolean isDatabaseLink(String link) {
if (StringUtils.isEmpty(link)) {
return false;
}
link = trimBeginningAndEndingSlashes(link);
String[] parts = StringUtils.split(link, "/");
if (parts.length != 2) {
return false;
}
if (StringUtils.isEmpty(parts[0]) || !parts[0].equalsIgnoreCase(Paths.DATABASES_PATH_SEGMENT)) {
return false;
}
if (StringUtils.isEmpty(parts[1])) {
return false;
}
return true;
}
/**
* Joins the specified paths by appropriately padding them with '/'
*
* @param path1 the first path segment to join.
* @param path2 the second path segment to join.
* @return the concatenated path with '/'
*/
public static String joinPath(String path1, String path2) {
path1 = trimBeginningAndEndingSlashes(path1);
String result = "/" + path1 + "/";
if (!StringUtils.isEmpty(path2)) {
path2 = trimBeginningAndEndingSlashes(path2);
result += path2 + "/";
}
return result;
}
/**
* Trims the beginning and ending '/' from the given path
*
* @param path the path to trim for beginning and ending slashes
* @return the path without beginning and ending '/'
*/
public static String trimBeginningAndEndingSlashes(String path) {
if(path == null) {
return null;
}
if (path.startsWith("/")) {
path = path.substring(1);
}
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
return path;
}
public static Map<String, String> paramEncode(Map<String, String> queryParams) {
HashMap<String, String> map = new HashMap<>();
for(Map.Entry<String, String> paramEntry: queryParams.entrySet()) {
try {
map.put(paramEntry.getKey(), URLEncoder.encode(paramEntry.getValue(), "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException(e);
}
}
return map;
}
/**
 * Builds a query string from the given name/value pairs.
 * Entries with a null/empty key are skipped; a null value emits the key alone.
 */
public static String createQuery(Map<String, String> queryParameters) {
    if (queryParameters == null) {
        return "";
    }
    StringBuilder query = new StringBuilder();
    for (Map.Entry<String, String> entry : queryParameters.entrySet()) {
        String name = entry.getKey();
        if (name == null || name.isEmpty()) {
            continue; // unnamed parameters are dropped
        }
        if (query.length() > 0) {
            // Query[1] is the pair separator — presumably '&'; see RuntimeConstants.
            query.append(RuntimeConstants.Separators.Query[1]);
        }
        query.append(name);
        String value = entry.getValue();
        if (value != null) {
            // Query[2] is the key/value separator — presumably '='.
            query.append(RuntimeConstants.Separators.Query[2]);
            query.append(value);
        }
    }
    return query.toString();
}
/**
 * Attaches the given query string (leading '?' optional) to the URL,
 * normalizing the URL with a trailing slash first.
 *
 * @throws IllegalStateException if urlString is null or the result is not a valid URI.
 */
public static URI setQuery(String urlString, String query) {
    if (urlString == null) {
        throw new IllegalStateException("urlString parameter can't be null.");
    }
    String trimmedQuery = Utils.removeLeadingQuestionMark(query);
    String base = Utils.addTrailingSlash(urlString);
    try {
        return (trimmedQuery == null || trimmedQuery.isEmpty())
            ? new URI(base)
            : new URI(base + RuntimeConstants.Separators.Query[0] + trimmedQuery);
    } catch (URISyntaxException e) {
        throw new IllegalStateException("Uri is invalid: ", e);
    }
}
/**
 * Given the full path to a resource, extract the collection path.
 *
 * @param resourceFullName the full path to the resource; may be null.
 * @return the path of the collection in which the resource is (the portion
 *         before the fourth '/'), or the trimmed input when it has fewer segments.
 */
public static String getCollectionName(String resourceFullName) {
    if (resourceFullName == null) {
        return null;
    }
    // A document path looks like "dbs/<db>/colls/<coll>/docs/<doc>"; the
    // collection path is everything before the fourth slash.
    String trimmed = Utils.trimBeginningAndEndingSlashes(resourceFullName);
    int slashesSeen = 0;
    for (int pos = 0; pos < trimmed.length(); pos++) {
        if (trimmed.charAt(pos) == '/' && ++slashesSeen == 4) {
            return trimmed.substring(0, pos);
        }
    }
    return trimmed;
}
/** Null-safe size: a null collection counts as empty. */
public static <T> int getCollectionSize(Collection<T> collection) {
    return collection == null ? 0 : collection.size();
}
/**
 * Tells whether the collection defines a partition key (at least one path).
 *
 * @throws IllegalArgumentException when collection is null.
 */
public static Boolean isCollectionPartitioned(DocumentCollection collection) {
    if (collection == null) {
        throw new IllegalArgumentException("collection");
    }
    if (collection.getPartitionKey() == null) {
        return false;
    }
    return collection.getPartitionKey().getPaths() != null
        && !collection.getPartitionKey().getPaths().isEmpty();
}
/** True for resource types that live inside a collection. */
public static boolean isCollectionChild(ResourceType type) {
    if (type == null) {
        return false; // matches the original ==-chain, which is null-safe
    }
    switch (type) {
        case Document:
        case Attachment:
        case Conflict:
        case StoredProcedure:
        case Trigger:
        case UserDefinedFunction:
            return true;
        default:
            return false;
    }
}
/** True for operations that mutate server state. */
public static boolean isWriteOperation(OperationType operationType) {
    if (operationType == null) {
        return false; // matches the original ==-chain, which is null-safe
    }
    switch (operationType) {
        case Create:
        case Upsert:
        case Delete:
        case Replace:
        case ExecuteJavaScript:
            return true;
        default:
            return false;
    }
}
/** True for operation types that address a feed (collection-level) endpoint. */
public static boolean isFeedRequest(OperationType requestOperationType) {
    if (requestOperationType == null) {
        return false; // matches the original ==-chain, which is null-safe
    }
    switch (requestOperationType) {
        case Create:
        case Upsert:
        case ReadFeed:
        case Query:
        case SqlQuery:
        case HeadFeed:
            return true;
        default:
            return false;
    }
}
/** Ensures the path ends with the URL separator; null/empty becomes just the separator. */
private static String addTrailingSlash(String path) {
    if (path == null || path.isEmpty()) {
        return String.valueOf(RuntimeConstants.Separators.Url);
    }
    char slash = RuntimeConstants.Separators.Url[0];
    return path.charAt(path.length() - 1) == slash ? path : path + slash;
}
/** Strips a single leading query marker ('?') when present; null/empty passes through. */
private static String removeLeadingQuestionMark(String path) {
    if (path == null || path.isEmpty()) {
        return path;
    }
    return path.charAt(0) == RuntimeConstants.Separators.Query[0] ? path.substring(1) : path;
}
/**
 * Tells whether the desired consistency level can be served by an account
 * whose default is backendConsistency: a client may request the backend's
 * own level or any weaker one, never a stronger one.
 *
 * @throws IllegalArgumentException for an unrecognized backend level.
 */
public static boolean isValidConsistency(ConsistencyLevel backendConsistency,
                                         ConsistencyLevel desiredConsistency) {
    // SESSION / EVENTUAL / CONSISTENT_PREFIX are the "weak" levels every backend supports.
    boolean desiredIsWeak = desiredConsistency == ConsistencyLevel.SESSION
        || desiredConsistency == ConsistencyLevel.EVENTUAL
        || desiredConsistency == ConsistencyLevel.CONSISTENT_PREFIX;
    switch (backendConsistency) {
        case STRONG:
            return desiredIsWeak
                || desiredConsistency == ConsistencyLevel.STRONG
                || desiredConsistency == ConsistencyLevel.BOUNDED_STALENESS;
        case BOUNDED_STALENESS:
            return desiredIsWeak
                || desiredConsistency == ConsistencyLevel.BOUNDED_STALENESS;
        case SESSION:
        case EVENTUAL:
        case CONSISTENT_PREFIX:
            return desiredIsWeak;
        default:
            throw new IllegalArgumentException("backendConsistency");
    }
}
// Builds the default user-agent string using the SDK name/version constants.
public static String getUserAgent() {
    return getUserAgent(HttpConstants.Versions.SDK_NAME, HttpConstants.Versions.SDK_VERSION);
}
/**
 * Builds a user-agent string of the form
 * "{os}/{osVersion} JRE/{javaVersion} {sdkName}/{sdkVersion}".
 * Whitespace is removed from the OS name; an unknown OS reports "Unknown".
 */
public static String getUserAgent(String sdkName, String sdkVersion) {
    String os = System.getProperty("os.name");
    os = (os == null) ? "Unknown" : os.replaceAll("\\s", "");
    return String.format("%s/%s JRE/%s %s/%s",
        os,
        System.getProperty("os.version"),
        System.getProperty("java.version"),
        sdkName,
        sdkVersion);
}
// Exposes the shared Jackson mapper instance held in Utils.simpleObjectMapper.
public static ObjectMapper getSimpleObjectMapper() {
    return Utils.simpleObjectMapper;
}
/**
 * Returns the current time in RFC 1123 format, e.g.
 * Fri, 01 Dec 2017 19:22:30 GMT.
 *
 * @return the current GMT time as an RFC 1123 formatted string.
 */
public static String nowAsRFC1123() {
    // Always format in GMT so the trailing zone name is stable.
    ZonedDateTime now = ZonedDateTime.now(GMT_ZONE_ID);
    return Utils.RFC_1123_DATE_TIME.format(now);
}
// Despite the name, this delegates to a time-based UUID generator
// (TIME_BASED_GENERATOR), not to java.util.UUID.randomUUID().
public static UUID randomUUID() {
    return TIME_BASED_GENERATOR.generate();
}
// Converts the given instant to GMT and formats it per RFC 1123.
public static String zonedDateTimeAsUTCRFC1123(OffsetDateTime offsetDateTime){
    return Utils.RFC_1123_DATE_TIME.format(offsetDateTime.atZoneSameInstant(GMT_ZONE_ID));
}
/** Unboxes val, substituting defaultValue when it is null. */
public static int getValueOrDefault(Integer val, int defaultValue) {
    if (val == null) {
        return defaultValue;
    }
    return val.intValue();
}
/** Throws IllegalArgumentException naming argumentName when value is false. */
public static void checkStateOrThrow(boolean value, String argumentName, String message) throws IllegalArgumentException {
    IllegalArgumentException failure = checkStateOrReturnException(value, argumentName, message);
    if (failure != null) {
        throw failure;
    }
}
/** Throws NullPointerException naming argumentName when val is null. */
public static void checkNotNullOrThrow(Object val, String argumentName, String message) throws NullPointerException {
    NullPointerException failure = checkNotNullOrReturnException(val, argumentName, message);
    if (failure != null) {
        throw failure;
    }
}
/**
 * Throws IllegalArgumentException when value is false, with the message built
 * from messageTemplate and messageTemplateParams.
 * Fix: the template itself is now forwarded — the previous version passed
 * argumentName twice, silently discarding messageTemplate and its parameters.
 */
public static void checkStateOrThrow(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) throws IllegalArgumentException {
    IllegalArgumentException t = checkStateOrReturnException(value, argumentName, messageTemplate, messageTemplateParams);
    if (t != null) {
        throw t;
    }
}
/** Returns null when value holds, otherwise an IllegalArgumentException describing the failure. */
public static IllegalArgumentException checkStateOrReturnException(boolean value, String argumentName, String message) {
    return value
        ? null
        : new IllegalArgumentException(String.format("argumentName: %s, message: %s", argumentName, message));
}
/**
 * Returns null when value holds, otherwise an IllegalArgumentException whose
 * message is built by expanding messageTemplate with messageTemplateParams.
 */
public static IllegalArgumentException checkStateOrReturnException(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) {
    if (value) {
        return null;
    }
    String detail = String.format(messageTemplate, messageTemplateParams);
    return new IllegalArgumentException(String.format("argumentName: %s, message: %s", argumentName, detail));
}
/** Returns null when val is non-null, otherwise a NullPointerException with an expanded message. */
private static NullPointerException checkNotNullOrReturnException(Object val, String argumentName, String messageTemplate, Object... messageTemplateParams) {
    if (val != null) {
        return null;
    }
    String detail = String.format(messageTemplate, messageTemplateParams);
    return new NullPointerException(String.format("argumentName: %s, message: %s", argumentName, detail));
}
/** Returns null when value holds, otherwise a BadRequestException describing the failure. */
public static BadRequestException checkRequestOrReturnException(boolean value, String argumentName, String message) {
    return value
        ? null
        : new BadRequestException(String.format("argumentName: %s, message: %s", argumentName, message));
}
/** Varargs flavor: expands messageTemplate with messageTemplateParams before wrapping. */
public static BadRequestException checkRequestOrReturnException(boolean value, String argumentName, String messageTemplate, Object... messageTemplateParams) {
    if (value) {
        return null;
    }
    String detail = String.format(messageTemplate, messageTemplateParams);
    return new BadRequestException(String.format("argumentName: %s, message: %s", argumentName, detail));
}
// Safe-cast helper: returns i as an O when it is an instance of klass,
// otherwise null (null in, null out) — the Java analogue of C#'s "as".
@SuppressWarnings("unchecked")
public static <O, I> O as(I i, Class<O> klass) {
    // isInstance has already proven the cast safe, hence the suppression.
    return (i != null && klass.isInstance(i)) ? (O) i : null;
}
/**
 * Returns the shared immutable empty list.
 * Uses the generic {@code Collections.emptyList()} instead of the raw
 * {@code EMPTY_LIST} constant, which required an unchecked-cast suppression.
 */
public static <V> List<V> immutableListOf() {
    return Collections.emptyList();
}
/** Returns an unmodifiable list containing exactly the given element. */
public static <V> List<V> immutableListOf(V v1) {
    List<V> single = new ArrayList<>();
    single.add(v1);
    return Collections.unmodifiableList(single);
}
// Returns the shared immutable empty map.
public static <K, V> Map<K, V>immutableMapOf() {
    return Collections.emptyMap();
}
/** Returns an unmodifiable map containing exactly the given entry. */
public static <K, V> Map<K, V>immutableMapOf(K k1, V v1) {
    Map<K, V> single = new HashMap<K, V>();
    single.put(k1, v1);
    return Collections.unmodifiableMap(single);
}
/**
 * Returns the first element of the list, or null when the list is empty.
 * Robustness fix: a null list now also yields null instead of throwing
 * NullPointerException.
 */
public static <V> V firstOrDefault(List<V> list) {
    return (list == null || list.isEmpty()) ? null : list.get(0);
}
// Simple mutable single-value container used to emulate C#-style "out"
// parameters (see tryGetValue / tryRemove).
public static class ValueHolder<V> {
    public ValueHolder() {
    }
    public ValueHolder(V v) {
        this.v = v;
    }
    // The held value; intentionally public and mutable.
    public V v;
    // Convenience factory wrapping an initial value.
    public static <T> ValueHolder<T> initialize(T v) {
        return new ValueHolder<T>(v);
    }
}
// Looks up key, stores the result in holder.v, and reports whether a non-null
// value was found. NOTE(review): a key explicitly mapped to null is reported
// as absent — confirm callers never store null values.
public static <K, V> boolean tryGetValue(Map<K, V> dictionary, K key, ValueHolder<V> holder) {
    holder.v = dictionary.get(key);
    return holder.v != null;
}
// Removes key, stores the removed value in holder.v, and reports whether a
// non-null value was removed (same null caveat as tryGetValue).
public static <K, V> boolean tryRemove(Map<K, V> dictionary, K key, ValueHolder<V> holder) {
    holder.v = dictionary.remove(key);
    return holder.v != null;
}
/**
 * Deserializes a JSON string into an instance of the given type; returns null
 * for a null/empty input.
 * Fix: the IOException is now attached as the cause of the thrown
 * IllegalStateException — previously it was passed as a surplus
 * {@code String.format} argument and silently dropped, losing the stack trace.
 *
 * @throws IllegalStateException when the payload cannot be parsed.
 */
public static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) {
    if (StringUtils.isEmpty(itemResponseBodyAsString)) {
        return null;
    }
    try {
        return getSimpleObjectMapper().readValue(itemResponseBodyAsString, itemClassType);
    } catch (IOException e) {
        throw new IllegalStateException(
            String.format("Failed to parse string [%s] to POJO.", itemResponseBodyAsString), e);
    }
}
// Deserializes a JSON byte array into an instance of the given type; returns
// null for a null/empty array. The IOException is preserved as the cause.
public static <T> T parse(byte[] item, Class<T> itemClassType) {
    if (Utils.isEmpty(item)) {
        return null;
    }
    try {
        return getSimpleObjectMapper().readValue(item, itemClassType);
    } catch (IOException e) {
        throw new IllegalStateException(
            String.format("Failed to parse byte-array %s to POJO.", Arrays.toString(item)), e);
    }
}
// Serializes object to JSON into a newly allocated ByteBuffer. The stream
// starts at ONE_KB capacity and grows as needed.
public static ByteBuffer serializeJsonToByteBuffer(ObjectMapper objectMapper, Object object) {
    try {
        ByteBufferOutputStream byteBufferOutputStream = new ByteBufferOutputStream(ONE_KB);
        objectMapper.writeValue(byteBufferOutputStream, object);
        return byteBufferOutputStream.asByteBuffer();
    } catch (IOException e) {
        // In-memory serialization failing indicates a bad input object, hence IllegalArgument.
        throw new IllegalArgumentException("Failed to serialize the object into json", e);
    }
}
/** Null-safe emptiness check: a null array counts as empty. */
public static boolean isEmpty(byte[] bytes) {
    if (bytes == null) {
        return true;
    }
    return bytes.length == 0;
}
/** Decodes the bytes as UTF-8; null in, null out. */
public static String utf8StringFromOrNull(byte[] bytes) {
    return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
}
// Copies the paging inputs (continuation token and page size) from the
// paged-flux options onto the query request options. A null pagedFluxOptions
// is a no-op; unset fields are left untouched.
public static void setContinuationTokenAndMaxItemCount(CosmosPagedFluxOptions pagedFluxOptions, CosmosQueryRequestOptions cosmosQueryRequestOptions) {
    if (pagedFluxOptions == null) {
        return;
    }
    if (pagedFluxOptions.getRequestContinuation() != null) {
        ModelBridgeInternal.setQueryRequestOptionsContinuationToken(cosmosQueryRequestOptions, pagedFluxOptions.getRequestContinuation());
    }
    if (pagedFluxOptions.getMaxItemCount() != null) {
        ModelBridgeInternal.setQueryRequestOptionsMaxItemCount(cosmosQueryRequestOptions, pagedFluxOptions.getMaxItemCount());
    }
}
/**
 * Replaces every character above U+007F with its {@code \\uXXXX} escape
 * (uppercase hex). Pure-ASCII input is returned as the same instance without
 * allocating a builder.
 */
static String escapeNonAscii(String partitionKeyJson) {
    // Fast path: find the first non-ASCII character, if any.
    int firstNonAscii = -1;
    for (int i = 0; i < partitionKeyJson.length(); i++) {
        if (partitionKeyJson.charAt(i) > 127) {
            firstNonAscii = i;
            break;
        }
    }
    if (firstNonAscii < 0) {
        return partitionKeyJson;
    }
    StringBuilder escaped = new StringBuilder(partitionKeyJson.length());
    escaped.append(partitionKeyJson, 0, firstNonAscii);
    for (int i = firstNonAscii; i < partitionKeyJson.length(); i++) {
        char c = partitionKeyJson.charAt(i);
        if (c > 127) {
            escaped.append(String.format("\\u%04X", (int) c));
        } else {
            escaped.append(c);
        }
    }
    return escaped.toString();
}
// Drains the readable region of buf into a new array. Note: this advances the
// buffer's reader index, so the bytes are consumed from buf.
static byte[] toByteArray(ByteBuf buf) {
    byte[] bytes = new byte[buf.readableBytes()];
    buf.readBytes(bytes);
    return bytes;
}
// Serializes the ObjectNode with the supplied mapper, converting the checked
// JsonProcessingException into an unchecked IllegalStateException.
public static String toJson(ObjectMapper mapper, ObjectNode object) {
    try {
        return mapper.writeValueAsString(object);
    } catch (JsonProcessingException e) {
        throw new IllegalStateException("Unable to convert JSON to STRING", e);
    }
}
/**
 * Java-serializes obj into a byte array.
 * Fix: the ObjectOutputStream is now closed via try-with-resources before the
 * underlying buffer is read — the previous version never closed/flushed it,
 * so any data still buffered in the stream could be missing from the result.
 *
 * @throws IOException if serialization fails.
 */
public static byte[] serializeObjectToByteArray(Object obj) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (ObjectOutputStream os = new ObjectOutputStream(out)) {
        os.writeObject(obj);
    }
    return out.toByteArray();
}
} |
NIT: final | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | String sessionTokenLsn = feedResponse.getSessionToken(); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    // First start: resolve database/collection ids, build the lease store
    // manager and the partition manager, cache it, then run it. A later call
    // reuses the cached manager and simply restarts it.
    if (this.partitionManager == null) {
        return this.initializeCollectionPropertiesForBuild()
            .flatMap( value -> this.getLeaseStoreManager()
                .flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
            .flatMap(partitionManager1 -> {
                this.partitionManager = partitionManager1;
                return this.partitionManager.start();
            });
    } else {
        return partitionManager.start();
    }
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 * @throws IllegalStateException when invoked before the processor has fully started.
 */
@Override
public Mono<Void> stop() {
    // Throws eagerly (rather than returning an error Mono) when not running.
    if (this.partitionManager == null || !this.partitionManager.isRunning()) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return this.partitionManager.stop();
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    // partitionManager stays null until the first start() completes its wiring.
    return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
    // Unlike the other setters there is no null check here; build() rejects a null host name.
    this.hostName = hostName;
    return this;
}
/**
 * Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException when feedDocumentClient is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient == null) {
        throw new IllegalArgumentException("feedContextClient");
    }
    // Wrap the container in the internal change-feed context abstraction.
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the change feed processor options to use.
 * @return current Builder.
 * @throws IllegalArgumentException when changeFeedProcessorOptions is null.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (changeFeedProcessorOptions == null) {
        throw new IllegalArgumentException("changeFeedProcessorOptions");
    }
    this.changeFeedProcessorOptions = changeFeedProcessorOptions;
    return this;
}
/**
 * Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when observerFactory is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (observerFactory == null) {
        throw new IllegalArgumentException("observerFactory");
    }
    this.observerFactory = observerFactory;
    return this;
}
/**
 * Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException when type is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (type == null) {
        throw new IllegalArgumentException("type");
    }
    // The factory reflectively instantiates one observer of this type per partition.
    this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
    return this;
}
// Convenience overload: adapts a plain consumer of change batches into an
// observer factory. Overwrites any factory set via observerFactory()/observer().
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    // Normally resolved automatically in initializeCollectionPropertiesForBuild();
    // this setter allows pre-seeding the value.
    this.databaseResourceId = databaseResourceId;
    return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    // Normally resolved automatically in initializeCollectionPropertiesForBuild();
    // this setter allows pre-seeding the value.
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when leaseClient is null or does not return
 *         content responses on write (lease bookkeeping needs to read documents back).
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Fix: corrected the warning's grammar ("setting are less then" -> "setting is less than").
        logger.warn("leaseClient consistency level setting is less than expected which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 * @throws IllegalArgumentException when loadBalancingStrategy is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy == null) {
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    // When never set, buildPartitionManager falls back to EqualPartitionsBalancingStrategy.
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when partitionProcessorFactory is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory == null) {
        throw new IllegalArgumentException("partitionProcessorFactory");
    }
    // When never set, buildPartitionManager falls back to PartitionProcessorFactoryImpl.
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when leaseStoreManager is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager == null) {
        throw new IllegalArgumentException("leaseStoreManager");
    }
    // When never set, getLeaseStoreManager() builds one from the lease container.
    this.leaseStoreManager = leaseStoreManager;
    return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when healthMonitor is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor == null) {
        throw new IllegalArgumentException("healthMonitor");
    }
    // When never set, buildPartitionManager falls back to TraceHealthMonitor.
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException when no host name or no observer was configured.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Default scheduler for partition supervision when none was supplied.
    if (this.scheduler == null) {
        this.scheduler = Schedulers.elastic();
    }
    // The builder itself implements ChangeFeedProcessor; the real wiring happens
    // lazily in start(). NOTE(review): feed/lease containers are not validated
    // here — start() will fail if they were never set; confirm whether build()
    // should check them too.
    return this;
}
// Default constructor. These assignments duplicate the field initializers
// (both fields are already set to these values at declaration).
public ChangeFeedProcessorBuilderImpl() {
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25;
}
// Bypass constructor: with a pre-built manager, start()/stop() delegate to it
// directly and skip the lazy wiring performed by the first start().
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
// Defaults the processor options when absent, then resolves and caches the
// database and collection ids used to scope lease documents.
// NOTE(review): the fields are named ...ResourceId but are filled from
// getProperties().getId() (the user-visible id, not the backend _rid) —
// confirm this is intended.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map( databaseResourceResponse -> {
            this.databaseResourceId = databaseResourceResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        .flatMap( id -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(documentCollectionResourceResponse -> {
                this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
                return this;
            }));
}
// Returns the configured lease store manager, building one lazily from the
// lease container on first use. The lease container is required to be
// partitioned on "/id" (exactly one partition key path equal to /id).
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                        collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                        collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                // Prefix isolates this processor's leases within a shared container.
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so subsequent calls (and getCurrentState) reuse it.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
// Lease id prefix: "<userPrefix><serviceHost>_<databaseId>_<collectionId>".
// This keeps leases of different monitored collections apart when they share
// one lease container. A null user prefix is treated as "".
private String getLeasePrefix() {
    String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (optionsPrefix == null) {
        optionsPrefix = "";
    }
    URI uri = this.feedContextClient.getServiceEndpoint();
    return String.format(
        "%s%s_%s_%s",
        optionsPrefix,
        uri.getHost(),
        this.databaseResourceId,
        this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
Same for several of the local variables below | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | String sessionTokenLsn = feedResponse.getSessionToken(); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 * Normally discovered automatically during start-up (see initializeCollectionPropertiesForBuild);
 * this setter allows overriding the discovered value.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 * Normally discovered automatically during start-up; this setter allows overriding it.
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 * The client must have content-response-on-write enabled (lease bookkeeping reads back written
 * documents), and a consistency level of at least SESSION is expected for correct lease handover.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
// Weaker-than-session consistency is only warned about, not rejected.
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * Optional: when unset, buildPartitionManager falls back to an EqualPartitionsBalancingStrategy.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 * Optional: when unset, buildPartitionManager falls back to a PartitionProcessorFactoryImpl.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 * Optional: when unset, getLeaseStoreManager builds one lazily from the lease container.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 * Optional: when unset, buildPartitionManager falls back to a TraceHealthMonitor.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 * Validates required settings (host name and observer) and defaults the scheduler.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if the host name or the observer factory was not configured.
 */
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
// Default to the shared elastic scheduler when the caller did not supply one.
// NOTE(review): unlike the variant of build() later in this file, this copy does not
// warn when leaseAcquireInterval is below the default — confirm which copy is current.
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
// Default constructor; field defaults are (re)assigned explicitly here.
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
// Test/advanced constructor: injects a pre-built PartitionManager, skipping lazy start-up wiring.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
/**
 * Lazily resolves the monitored database and container ids used to namespace lease documents.
 * Also defaults {@code changeFeedProcessorOptions} when the caller supplied none.
 * NOTE(review): the fields are named *ResourceId but are populated from getProperties().getId()
 * (the user-visible id, not the backend _rid) — confirm this is intentional.
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
/**
 * Returns the configured {@link LeaseStoreManager}, building one lazily from the lease
 * container when none was supplied via withLeaseStoreManager.
 * The lease container must be partitioned by exactly "/id"; anything else is rejected.
 */
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
// Prefix scopes this processor's lease documents within the shared lease container.
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so subsequent calls (and getEstimatedLag/getCurrentState) reuse the same manager.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the lease-document name prefix: "<userPrefix><endpointHost>_<databaseId>_<collectionId>".
 * The prefix keeps leases of different processors apart inside a shared lease container.
 */
private String getLeasePrefix() {
    String userPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (userPrefix == null) {
        userPrefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return userPrefix
        + serviceEndpoint.getHost()
        + "_" + this.databaseResourceId
        + "_" + this.collectionResourceId;
}
/**
 * Wires the change-feed processing pipeline: partition sync, lease bootstrap, per-partition
 * supervisors, load balancing and health monitoring, all driven by the configured scheduler.
 */
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
// Wraps the user observer factory so checkpoints are written after each processed batch.
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// User-supplied processor factory wins; otherwise use the default implementation.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the health-monitoring decorator (partitionController2) is passed only to the
// load balancer, while the undecorated partitionController is handed to PartitionManagerImpl —
// confirm the manager is intended to bypass health monitoring.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
// Best-effort shutdown: fire-and-forget stop() on the elastic scheduler.
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 * On first start this lazily resolves collection ids, builds (or reuses) the lease store
 * manager and assembles the partition manager; subsequent starts reuse the cached manager.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
 * Stops listening for changes asynchronously.
 * NOTE(review): throws synchronously (rather than returning Mono.error) when the processor
 * was never fully started — callers composing reactively should be aware.
 *
 * @return a representation of the deferred computation of this call.
 * @throws IllegalStateException if the processor has not fully started.
 */
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
 * Returns the current owner (host) and an approximation of the difference between the last processed item (defined
 * by the state of the feed container) and the latest change in the container for each partition (lease
 * document).
 * <p>
 * An empty map will be returned if the processor was not started or no lease documents matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
 * lag, asynchronously.
 */
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
// Not started yet: report an empty map rather than erroring.
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
// NOTE(review): this statement is truncated ("= );") — the per-lease change-feed query
// that produced a Pair<leaseToken, lag> has been lost (looks like an extraction/merge
// artifact). As written this lambda does not compile; restore the query body, cf. the
// similar per-lease query in getCurrentState().
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
// Not started yet: report an empty (immutable) list rather than erroring.
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
// Probe each partition with a single-item change-feed read from the lease's
// continuation point to learn the session token (latest LSN) and the next document.
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
// Session token shape: "<pkRangeId>:<version><SEGMENT_SEPARATOR><globalLsn>".
// substring(indexOf(':')) keeps the ':' prefix; the split below discards it
// with the first segment.
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
// Vector session token: the global LSN is the second segment.
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
// No pending documents: fully caught up for this partition.
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
// Lag = latest LSN minus the LSN of the next unprocessed document, inclusive.
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
// -1 marks "unknown lag" when the LSN is not numeric.
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
using == instead of .equals()? | public void beginRecognizeReceipts() throws IOException {
// Reads the receipt file and streams it to the service for recognition.
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
    .flatMap(recognizePollingOperation ->
        recognizePollingOperation.getFinalResult())
    .subscribe(recognizedReceipts -> {
        for (int i = 0; i < recognizedReceipts.size(); i++) {
            RecognizedForm recognizedForm = recognizedReceipts.get(i);
            Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
            System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                // Enum constants are singletons; identity comparison is the idiomatic check here.
                if (FieldValueType.STRING == merchantNameField.getValueType()) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // BUG FIX: previously checked merchantNameField.getValueType() here (copy-paste),
                // which NPEs when "MerchantName" is absent and skips valid phone numbers.
                if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE == transactionDateField.getValueType()) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                // "Items" is a list of per-line-item maps; drill into each item's "Quantity".
                if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP == receiptItem.getValueType()) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE == formField.getValueType()) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        }
    });
} | if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) { | public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
// Shared client instance used by the snippet methods below.
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
 * Code snippet for creating a {@link FormRecognizerAsyncClient} with key credential and endpoint.
 */
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
 * Code snippet for creating a {@link FormRecognizerAsyncClient} with a custom HTTP pipeline.
 */
public void createFormRecognizerAsyncClientWithPipeline() {
// The empty policies(...) call is a placeholder: users insert their own pipeline policies.
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
 * Code snippet for
 * {@link FormRecognizerAsyncClient#beginRecognizeCustomFormsFromUrl(String, String)}.
 */
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).flatMap(
recognizePollingOperation ->
// Wait for the long-running recognition operation to finish.
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
 * Code snippet for
 * {@link FormRecognizerAsyncClient#beginRecognizeCustomFormsFromUrl(String, String)}
 * with {@link RecognizeOptions} (field elements and a custom poll interval).
 */
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult()).subscribe(recognizedForms ->
recognizedForms.forEach(recognizedForm -> {
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
});
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl).flatMap(
recognizePollingOperation -> recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult -> contentPageResult.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult -> contentPageResult.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult -> contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(contentPageResult -> contentPageResult.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER.equals(merchantNameField.getValueType())) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.forEach(receiptItem -> {
if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
});
}
});
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER.equals(merchantNameField.getValueType())) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.forEach(receiptItem -> {
if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
});
}
});
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(recognizePollingOperation ->
recognizePollingOperation.getFinalResult())
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER.equals(merchantNameField.getValueType())) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.forEach(receiptItem -> {
if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
});
}
});
}
}
}
});
}
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
    // Recognize a local JPEG receipt, asking the service to also return field
    // elements and polling the long-running operation every five seconds.
    File receipt = new File("{local/file_path/fileName.jpg}");
    boolean includeFieldElements = true;
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
    formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
        new RecognizeOptions()
            .setContentType(FormContentType.IMAGE_JPEG)
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(5)))
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(recognizedReceipts -> {
            for (int pageIndex = 0; pageIndex < recognizedReceipts.size(); pageIndex++) {
                RecognizedForm receiptForm = recognizedReceipts.get(pageIndex);
                Map<String, FormField<?>> fields = receiptForm.getFields();
                System.out.printf("----------- Recognized Receipt page %d -----------%n", pageIndex);
                // Each well-known receipt field is printed only when present and of the expected type.
                FormField<?> merchantNameField = fields.get("MerchantName");
                if (merchantNameField != null
                    && FieldValueType.STRING == merchantNameField.getValueType()) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
                FormField<?> merchantPhoneNumberField = fields.get("MerchantPhoneNumber");
                if (merchantPhoneNumberField != null
                    && FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
                FormField<?> transactionDateField = fields.get("TransactionDate");
                if (transactionDateField != null
                    && FieldValueType.DATE == transactionDateField.getValueType()) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
                FormField<?> receiptItemsField = fields.get("Items");
                if (receiptItemsField != null) {
                    System.out.printf("Receipt Items: %n");
                    if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                        List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                        // Every line item is a map of sub-fields; only "Quantity" is printed here.
                        receiptItems.forEach(receiptItem -> {
                            if (FieldValueType.MAP == receiptItem.getValueType()) {
                                Map<String, FormField<?>> itemFields = FieldValueType.MAP.cast(receiptItem);
                                itemFields.forEach((key, formField) -> {
                                    if ("Quantity".equals(key)
                                        && FieldValueType.DOUBLE == formField.getValueType()) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                });
                            }
                        });
                    }
                }
            }
        });
}
} |
same to rest of files. Might be need to revisit CodeSnippets files again | public void beginRecognizeReceipts() throws IOException {
// NOTE(review): "before" body of beginRecognizeReceipts; the signature and closing brace
// sit on the adjacent dataset-separator lines, so only comments are added here.
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
    .flatMap(recognizePollingOperation ->
        recognizePollingOperation.getFinalResult())
    .subscribe(recognizedReceipts -> {
        for (int i = 0; i < recognizedReceipts.size(); i++) {
            RecognizedForm recognizedForm = recognizedReceipts.get(i);
            Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
            System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                // NOTE(review): enum compared with equals(); the corrected snippet uses == instead.
                if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // NOTE(review): copy/paste bug — this guard inspects merchantNameField's type but
                // casts merchantPhoneNumberField; it should check merchantPhoneNumberField.getValueType()
                // (the corrected body later in this file does exactly that).
                if (FieldValueType.PHONE_NUMBER.equals(merchantNameField.getValueType())) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        }
    });
} | if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) { | public void beginRecognizeReceipts() throws IOException {
// "After" body of beginRecognizeReceipts: the phone-number guard now inspects
// merchantPhoneNumberField itself, enum types are compared with ==, and the
// item maps are processed with a stream pipeline instead of nested forEach.
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
    .flatMap(AsyncPollResponse::getFinalResult)
    .subscribe(recognizedReceipts -> {
        for (int i = 0; i < recognizedReceipts.size(); i++) {
            RecognizedForm recognizedForm = recognizedReceipts.get(i);
            Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
            System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING == merchantNameField.getValueType()) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // Correct guard: the field being cast is the one whose type is checked.
                if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE == transactionDateField.getValueType()) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.stream()
                        .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                        .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                        .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                            if ("Quantity".equals(key)) {
                                if (FieldValueType.DOUBLE == formField.getValueType()) {
                                    Float quantity = FieldValueType.DOUBLE.cast(formField);
                                    System.out.printf("Quantity: %f, confidence: %.2f%n",
                                        quantity, formField.getConfidence());
                                }
                            }
                        }));
                }
            }
        }
    });
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
    // Build an async client for the target endpoint, authenticating with an API key.
    FormRecognizerClientBuilder builder = new FormRecognizerClientBuilder()
        .endpoint("{endpoint}")
        .credential(new AzureKeyCredential("{key}"));
    FormRecognizerAsyncClient formRecognizerAsyncClient = builder.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
    // A custom pipeline lets callers plug in their own policies (retry, logging, ...).
    HttpPipeline httpPipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
        .endpoint("{endpoint}")
        .credential(new AzureKeyCredential("{key}"))
        .pipeline(httpPipeline)
        .buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
    // Recognize the form at the given URL with a custom-trained model, then print each field.
    String formUrl = "{form_url}";
    String modelId = "{custom_trained_model_id}";
    formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(recognizedForms -> recognizedForms.forEach(form ->
            form.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            })));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
    // Same as the basic URL overload, but also requests field elements and a 10s poll interval.
    String formUrl = "{formUrl}";
    String modelId = "{model_id}";
    boolean includeTextContent = true;
    RecognizeOptions recognizeOptions = new RecognizeOptions()
        .setIncludeFieldElements(includeTextContent)
        .setPollInterval(Duration.ofSeconds(10));
    formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId, recognizeOptions)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(recognizedForms -> recognizedForms.forEach(form ->
            form.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            })));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
    // Recognize a local file with a custom-trained model; the file is streamed as byte buffers.
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
    formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(recognizedForms -> recognizedForms.forEach(recognizedForm ->
            recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            })));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
    // Local-file recognition with explicit content type, field elements, and a 5s poll interval.
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    boolean includeFieldElements = true;
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
    RecognizeOptions recognizeOptions = new RecognizeOptions()
        .setContentType(FormContentType.IMAGE_JPEG)
        .setIncludeFieldElements(includeFieldElements)
        .setPollInterval(Duration.ofSeconds(5));
    formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId, recognizeOptions)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(recognizedForms -> recognizedForms.forEach(recognizedForm ->
            recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            })));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
    // Extract layout (text angle, unit, table cells) from the form at the given URL.
    String formUrl = "{formUrl}";
    formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(formPages -> formPages.forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(cell ->
                    System.out.printf("%s ", cell.getText())));
        }));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
    // Layout extraction from a URL, polling the operation every five seconds.
    String formUrl = "{formUrl}";
    RecognizeOptions recognizeOptions = new RecognizeOptions().setPollInterval(Duration.ofSeconds(5));
    formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl, recognizeOptions)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(formPages -> formPages.forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(cell ->
                    System.out.printf("%s ", cell.getText())));
        }));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
    // Layout extraction from a local file streamed as byte buffers.
    File form = new File("{local/file_path/fileName.jpg}");
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
    formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(formPages -> formPages.forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(cell ->
                    System.out.printf("%s ", cell.getText())));
        }));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
    // Layout extraction from a local PDF, with explicit content type and a 5s poll interval.
    File form = new File("{local/file_path/fileName.jpg}");
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
    RecognizeOptions recognizeOptions = new RecognizeOptions()
        .setContentType(FormContentType.APPLICATION_PDF)
        .setPollInterval(Duration.ofSeconds(5));
    formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(), recognizeOptions)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .subscribe(formPages -> formPages.forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable -> formTable.getCells().forEach(cell ->
                System.out.printf("%s ", cell.getText())));
        }));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
    // Recognize the receipt at the given URL and print selected fields of the final result.
    String receiptUrl = "{receiptUrl}";
    formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
        .flatMap(recognizePollingOperation ->
            recognizePollingOperation.getFinalResult())
        .subscribe(recognizedReceipts -> {
            for (int i = 0; i < recognizedReceipts.size(); i++) {
                RecognizedForm recognizedForm = recognizedReceipts.get(i);
                Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
                System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
                FormField<?> merchantNameField = recognizedFields.get("MerchantName");
                if (merchantNameField != null) {
                    // Enum constants are singletons, so == is safe and matches the corrected snippets.
                    if (FieldValueType.STRING == merchantNameField.getValueType()) {
                        String merchantName = FieldValueType.STRING.cast(merchantNameField);
                        System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                            merchantName, merchantNameField.getConfidence());
                    }
                }
                FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
                if (merchantPhoneNumberField != null) {
                    // BUGFIX: the original guarded on merchantNameField's type (copy/paste error);
                    // the type check must inspect the field that is about to be cast.
                    if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                        String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                        System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                            merchantAddress, merchantPhoneNumberField.getConfidence());
                    }
                }
                FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
                if (transactionDateField != null) {
                    if (FieldValueType.DATE == transactionDateField.getValueType()) {
                        LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                        System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                            transactionDate, transactionDateField.getConfidence());
                    }
                }
                FormField<?> receiptItemsField = recognizedFields.get("Items");
                if (receiptItemsField != null) {
                    System.out.printf("Receipt Items: %n");
                    if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                        List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                        // Each line item is a map of sub-fields; only "Quantity" is printed here.
                        receiptItems.forEach(receiptItem -> {
                            if (FieldValueType.MAP == receiptItem.getValueType()) {
                                Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                                formFieldMap.forEach((key, formField) -> {
                                    if ("Quantity".equals(key)) {
                                        if (FieldValueType.DOUBLE == formField.getValueType()) {
                                            Float quantity = FieldValueType.DOUBLE.cast(formField);
                                            System.out.printf("Quantity: %f, confidence: %.2f%n",
                                                quantity, formField.getConfidence());
                                        }
                                    }
                                });
                            }
                        });
                    }
                }
            }
        });
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
    // Recognize the receipt at the URL, also returning field elements and polling every 5s.
    String receiptUrl = "{receiptUrl}";
    boolean includeTextContent = true;
    formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
        new RecognizeOptions()
            .setIncludeFieldElements(includeTextContent)
            .setPollInterval(Duration.ofSeconds(5)))
        .flatMap(recognizePollingOperation ->
            recognizePollingOperation.getFinalResult())
        .subscribe(recognizedReceipts -> {
            for (int i = 0; i < recognizedReceipts.size(); i++) {
                RecognizedForm recognizedReceipt = recognizedReceipts.get(i);
                Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
                System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
                FormField<?> merchantNameField = recognizedFields.get("MerchantName");
                if (merchantNameField != null) {
                    // Enum constants are singletons, so == is safe and matches the corrected snippets.
                    if (FieldValueType.STRING == merchantNameField.getValueType()) {
                        String merchantName = FieldValueType.STRING.cast(merchantNameField);
                        System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                            merchantName, merchantNameField.getConfidence());
                    }
                }
                FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
                if (merchantPhoneNumberField != null) {
                    // BUGFIX: the original guarded on merchantNameField's type (copy/paste error);
                    // the type check must inspect the field that is about to be cast.
                    if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                        String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                        System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                            merchantAddress, merchantPhoneNumberField.getConfidence());
                    }
                }
                FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
                if (transactionDateField != null) {
                    if (FieldValueType.DATE == transactionDateField.getValueType()) {
                        LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                        System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                            transactionDate, transactionDateField.getConfidence());
                    }
                }
                FormField<?> receiptItemsField = recognizedFields.get("Items");
                if (receiptItemsField != null) {
                    System.out.printf("Receipt Items: %n");
                    if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                        List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                        receiptItems.forEach(receiptItem -> {
                            if (FieldValueType.MAP == receiptItem.getValueType()) {
                                Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                                formFieldMap.forEach((key, formField) -> {
                                    if ("Quantity".equals(key)) {
                                        if (FieldValueType.DOUBLE == formField.getValueType()) {
                                            Float quantity = FieldValueType.DOUBLE.cast(formField);
                                            System.out.printf("Quantity: %f, confidence: %.2f%n",
                                                quantity, formField.getConfidence());
                                        }
                                    }
                                });
                            }
                        });
                    }
                }
            }
        });
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
    // Recognize a local JPEG receipt with field elements enabled and a 5s poll interval.
    File receipt = new File("{local/file_path/fileName.jpg}");
    boolean includeFieldElements = true;
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
    formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
        new RecognizeOptions()
            .setContentType(FormContentType.IMAGE_JPEG)
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(5)))
        .flatMap(recognizePollingOperation ->
            recognizePollingOperation.getFinalResult())
        .subscribe(recognizedReceipts -> {
            for (int i = 0; i < recognizedReceipts.size(); i++) {
                RecognizedForm recognizedForm = recognizedReceipts.get(i);
                Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
                System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
                FormField<?> merchantNameField = recognizedFields.get("MerchantName");
                if (merchantNameField != null) {
                    // Enum constants are singletons, so == is safe and matches the corrected snippets.
                    if (FieldValueType.STRING == merchantNameField.getValueType()) {
                        String merchantName = FieldValueType.STRING.cast(merchantNameField);
                        System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                            merchantName, merchantNameField.getConfidence());
                    }
                }
                FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
                if (merchantPhoneNumberField != null) {
                    // BUGFIX: the original guarded on merchantNameField's type (copy/paste error);
                    // the type check must inspect the field that is about to be cast.
                    if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                        String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                        System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                            merchantAddress, merchantPhoneNumberField.getConfidence());
                    }
                }
                FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
                if (transactionDateField != null) {
                    if (FieldValueType.DATE == transactionDateField.getValueType()) {
                        LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                        System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                            transactionDate, transactionDateField.getConfidence());
                    }
                }
                FormField<?> receiptItemsField = recognizedFields.get("Items");
                if (receiptItemsField != null) {
                    System.out.printf("Receipt Items: %n");
                    if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                        List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                        receiptItems.forEach(receiptItem -> {
                            if (FieldValueType.MAP == receiptItem.getValueType()) {
                                Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                                formFieldMap.forEach((key, formField) -> {
                                    if ("Quantity".equals(key)) {
                                        if (FieldValueType.DOUBLE == formField.getValueType()) {
                                            Float quantity = FieldValueType.DOUBLE.cast(formField);
                                            System.out.printf("Quantity: %f, confidence: %.2f%n",
                                                quantity, formField.getConfidence());
                                        }
                                    }
                                });
                            }
                        });
                    }
                }
            }
        });
}
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
    // Key-credential client creation against the service endpoint.
    FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
        .endpoint("{endpoint}")
        .credential(new AzureKeyCredential("{key}"))
        .buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
    // Supply a caller-built HTTP pipeline so custom policies participate in every request.
    HttpPipeline httpPipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
        .pipeline(httpPipeline)
        .endpoint("{endpoint}")
        .credential(new AzureKeyCredential("{key}"))
        .buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
    // Await the final result, flatten the recognized-form list, and print every field.
    String formUrl = "{form_url}";
    String modelId = "{custom_trained_model_id}";
    formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .flatMap(forms -> Flux.fromIterable(forms))
        .subscribe(recognizedForm -> recognizedForm.getFields()
            .forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
    // URL recognition with field elements requested and a ten-second poll interval.
    String formUrl = "{formUrl}";
    String modelId = "{model_id}";
    boolean includeTextContent = true;
    RecognizeOptions recognizeOptions = new RecognizeOptions()
        .setIncludeFieldElements(includeTextContent)
        .setPollInterval(Duration.ofSeconds(10));
    formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId, recognizeOptions)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .flatMap(forms -> Flux.fromIterable(forms))
        .subscribe(recognizedForm -> recognizedForm.getFields()
            .forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
    // Stream a local file to the service and print each recognized field.
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
    formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .flatMap(forms -> Flux.fromIterable(forms))
        .subscribe(recognizedForm -> recognizedForm.getFields()
            .forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
    // Local-file recognition with explicit JPEG content type, field elements, and a 5s poll interval.
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    boolean includeFieldElements = true;
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
    RecognizeOptions recognizeOptions = new RecognizeOptions()
        .setContentType(FormContentType.IMAGE_JPEG)
        .setIncludeFieldElements(includeFieldElements)
        .setPollInterval(Duration.ofSeconds(5));
    formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId, recognizeOptions)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .flatMap(forms -> Flux.fromIterable(forms))
        .subscribe(recognizedForm -> recognizedForm.getFields()
            .forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
    // Layout extraction from a URL: page metadata first, then each table cell's text.
    String formUrl = "{formUrl}";
    formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .flatMap(pages -> Flux.fromIterable(pages))
        .subscribe(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(cell ->
                    System.out.printf("%s ", cell.getText())));
        });
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
    // Same as the plain URL overload but polls the operation every five seconds.
    String formUrl = "{formUrl}";
    RecognizeOptions recognizeOptions = new RecognizeOptions().setPollInterval(Duration.ofSeconds(5));
    formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl, recognizeOptions)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .flatMap(pages -> Flux.fromIterable(pages))
        .subscribe(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(cell ->
                    System.out.printf("%s ", cell.getText())));
        });
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
    // Layout extraction from a local file streamed as byte buffers.
    File form = new File("{local/file_path/fileName.jpg}");
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
    formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .flatMap(pages -> Flux.fromIterable(pages))
        .subscribe(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(cell ->
                    System.out.printf("%s ", cell.getText())));
        });
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
    // Layout extraction from a local PDF with an explicit content type and a 5s poll interval.
    File form = new File("{local/file_path/fileName.jpg}");
    Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
    RecognizeOptions recognizeOptions = new RecognizeOptions()
        .setContentType(FormContentType.APPLICATION_PDF)
        .setPollInterval(Duration.ofSeconds(5));
    formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(), recognizeOptions)
        .flatMap(pollResponse -> pollResponse.getFinalResult())
        .flatMap(pages -> Flux.fromIterable(pages))
        .subscribe(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable -> formTable.getCells().forEach(cell ->
                System.out.printf("%s ", cell.getText())));
        });
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
} |
perfect place to use stream() method here. Great. | public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
for (RecognizedForm recognizedForm : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()) {
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantNameField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %d, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
} | .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType()) | public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
for (RecognizedForm recognizedForm : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()) {
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
}
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.forEach(recognizedForm ->
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String analyzeFilePath = "{file_source_url}";
String modelId = "{model_id}";
boolean includeFieldElements = true;
formRecognizerClient.beginRecognizeCustomFormsFromUrl(analyzeFilePath, modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId).getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formPath = "{file_source_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formPath,
new RecognizeOptions()
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()
.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(targetStream, form.length()).getFinalResult()
.forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(targetStream, form.length(),
new RecognizeOptions()
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()
.forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{file_source_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER.equals(merchantNameField.getValueType())) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.forEach(receiptItem -> {
if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %d, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receipt_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER.equals(merchantNameField.getValueType())) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.forEach(receiptItem -> {
if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %d, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{receipt_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length()).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER.equals(merchantNameField.getValueType())) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %d, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String analyzeFilePath = "{file_source_url}";
String modelId = "{model_id}";
boolean includeFieldElements = true;
formRecognizerClient.beginRecognizeCustomFormsFromUrl(analyzeFilePath, modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId)
.getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
}
/**
 * Code snippet for {@code FormRecognizerClient#beginRecognizeContentFromUrl(String)}.
 * Recognizes layout content from a publicly reachable form URL and prints page
 * metadata plus the text of every table cell.
 */
public void beginRecognizeContentFromUrl() {
    String formUrl = "{form_url}";
    for (FormPage formPage : formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult()) {
        System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
        System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
        System.out.println("Recognized Tables: ");
        // Walk each table and print its cells in order.
        formPage.getTables().forEach(formTable ->
            formTable.getCells().forEach(recognizedTableCell ->
                System.out.printf("%s ", recognizedTableCell.getText())));
    }
}
/**
 * Code snippet for
 * {@code FormRecognizerClient#beginRecognizeContentFromUrl(String, RecognizeOptions)}
 * with a custom polling interval. Prints page metadata and all table cell text.
 */
public void beginRecognizeContentFromUrlWithOptions() {
// Placeholder URL -- replace with a reachable form document URL.
String formPath = "{file_source_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formPath,
new RecognizeOptions()
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()
.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
// Flatten all tables on the page into a single stream of cells.
formPage.getTables()
.stream()
.flatMap(formTable -> formTable.getCells().stream())
.forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
});
}
/**
 * Code snippet for {@code FormRecognizerClient#beginRecognizeContent(InputStream, long)}.
 * Recognizes layout content from a local file and prints page metadata plus
 * the text of every table cell.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeContent() throws IOException {
// Placeholder path -- replace with a real local document.
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
// try-with-resources guarantees the stream is closed after polling completes.
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
formRecognizerClient.beginRecognizeContent(targetStream, form.length())
.getFinalResult()
.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
// Flatten all tables on the page into a single stream of cells.
formPage.getTables()
.stream()
.flatMap(formTable -> formTable.getCells().stream())
.forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
});
}
}
/**
 * Code snippet for
 * {@code FormRecognizerClient#beginRecognizeContent(InputStream, long, RecognizeOptions)}
 * with a custom polling interval. Prints page metadata and all table cell text.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeContentWithOptions() throws IOException {
    File sourceFile = new File("{file_source_url}");
    byte[] sourceBytes = Files.readAllBytes(sourceFile.toPath());
    // Close the stream deterministically once the long-running operation finishes.
    try (InputStream sourceStream = new ByteArrayInputStream(sourceBytes)) {
        formRecognizerClient.beginRecognizeContent(sourceStream, sourceFile.length(),
            new RecognizeOptions()
                .setPollInterval(Duration.ofSeconds(5)))
            .getFinalResult()
            .forEach(formPage -> {
                System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
                System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
                System.out.println("Recognized Tables: ");
                // Print the cells of every table on the page.
                formPage.getTables().forEach(formTable ->
                    formTable.getCells().forEach(recognizedTableCell ->
                        System.out.printf("%s ", recognizedTableCell.getText())));
            });
    }
}
/**
 * Code snippet for {@code FormRecognizerClient#beginRecognizeReceiptsFromUrl(String)}.
 * Recognizes a sales receipt from a URL and prints well-known receipt fields
 * (merchant name, phone number, transaction date, line-item quantities).
 * Each field's value type is checked before casting via {@code FieldValueType#cast}.
 */
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{file_source_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
// Fields are optional: each well-known key may be absent from the map.
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
// "Items" is a LIST of MAP fields; each map holds one line item's sub-fields.
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.forEach(receiptItem -> {
if (FieldValueType.MAP == receiptItem.getValueType()) {
Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
 * Code snippet for
 * {@code FormRecognizerClient#beginRecognizeReceiptsFromUrl(String, RecognizeOptions)}.
 * Recognizes a sales receipt from a URL with recognition options and prints
 * well-known receipt fields. Each field's value type is checked before casting.
 */
public void beginRecognizeReceiptsFromUrlWithOptions() {
    String receiptUrl = "{receipt_url}";
    boolean includeFieldElements = true;
    // Pass RecognizeOptions so this snippet actually demonstrates the "...WithOptions"
    // overload; previously it called the plain overload despite the method name.
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl,
        new RecognizeOptions()
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(5)))
        .getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            // Fields are optional: each well-known key may be absent from the map.
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING == merchantNameField.getValueType()) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE == transactionDateField.getValueType()) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            // "Items" is a LIST of MAP fields; each map holds one line item's sub-fields.
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.stream()
                        .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                        .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                        .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                            if ("Quantity".equals(key)) {
                                if (FieldValueType.DOUBLE == formField.getValueType()) {
                                    Float quantity = FieldValueType.DOUBLE.cast(formField);
                                    System.out.printf("Quantity: %f, confidence: %.2f%n",
                                        quantity, formField.getConfidence());
                                }
                            }
                        }));
                }
            }
        });
}
/**
 * Code snippet for {@code FormRecognizerClient#beginRecognizeReceipts(InputStream, long)}.
 * Recognizes a sales receipt from a local file and prints well-known receipt
 * fields. Each field's value type is checked before casting via
 * {@code FieldValueType#cast}.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeReceipts() throws IOException {
// Placeholder path -- replace with a real local receipt image/document.
File receipt = new File("{receipt_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
// try-with-resources guarantees the stream is closed after polling completes.
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length()).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
// Fields are optional: each well-known key may be absent from the map.
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
// "Items" is a LIST of MAP fields; each map holds one line item's sub-fields.
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
});
}
}
/**
* Code snippet for {@link FormRecognizerClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
Is it possible to use stream and filter here as well? | public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
for (RecognizedForm recognizedForm : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()) {
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantNameField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %d, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
} | if ("Quantity".equals(key)) { | public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
for (RecognizedForm recognizedForm : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()) {
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
}
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.forEach(recognizedForm ->
recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String analyzeFilePath = "{file_source_url}";
String modelId = "{model_id}";
boolean includeFieldElements = true;
formRecognizerClient.beginRecognizeCustomFormsFromUrl(analyzeFilePath, modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
 * Code snippet for
 * {@code FormRecognizerClient#beginRecognizeCustomForms(InputStream, long, String)}.
 * Recognizes a local form file with a custom trained model and prints each
 * recognized field's text, value, and confidence score.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeCustomForms() throws IOException {
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // try-with-resources: the original never closed the stream, leaking it.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId).getFinalResult()
            .forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
    }
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formPath = "{file_source_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formPath,
new RecognizeOptions()
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()
.forEach(recognizedForm -> {
System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(targetStream, form.length()).getFinalResult()
.forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeContent(targetStream, form.length(),
new RecognizeOptions()
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()
.forEach(recognizedForm -> {
System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
System.out.println("Recognized Tables: ");
recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
 * Code snippet for {@code FormRecognizerClient#beginRecognizeReceiptsFromUrl(String)}.
 * Recognizes a sales receipt from a URL and prints well-known receipt fields.
 * Each field's value type is checked before casting via {@code FieldValueType#cast}.
 */
public void beginRecognizeReceiptsFromUrl() {
    String receiptUrl = "{file_source_url}";
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            // Fields are optional: each well-known key may be absent from the map.
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // Fix copy-paste bug: the original checked merchantNameField's type here,
                // so the phone-number cast could run against the wrong field.
                if (FieldValueType.PHONE_NUMBER.equals(merchantPhoneNumberField.getValueType())) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            // "Items" is a LIST of MAP fields; each map holds one line item's sub-fields.
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        // %f, not %d: quantity is a Float; %d throws
                                        // IllegalFormatConversionException at runtime.
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        });
}
/**
 * Code snippet for
 * {@code FormRecognizerClient#beginRecognizeReceiptsFromUrl(String, RecognizeOptions)}.
 * Recognizes a sales receipt from a URL with recognition options and prints
 * well-known receipt fields.
 */
public void beginRecognizeReceiptsFromUrlWithOptions() {
    String receiptUrl = "{receipt_url}";
    // Pass RecognizeOptions so this snippet actually demonstrates the "...WithOptions"
    // overload; previously it called the plain overload despite the method name.
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl,
        new RecognizeOptions()
            .setPollInterval(Duration.ofSeconds(5)))
        .getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            // Fields are optional: each well-known key may be absent from the map.
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // Fix copy-paste bug: the original checked merchantNameField's type here.
                if (FieldValueType.PHONE_NUMBER.equals(merchantPhoneNumberField.getValueType())) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            // "Items" is a LIST of MAP fields; each map holds one line item's sub-fields.
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        // %f, not %d: quantity is a Float; %d throws
                                        // IllegalFormatConversionException at runtime.
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        });
}
/**
 * Code snippet for {@code FormRecognizerClient#beginRecognizeReceipts(InputStream, long)}.
 * Recognizes a sales receipt from a local file and prints well-known receipt fields.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeReceipts() throws IOException {
    File receipt = new File("{receipt_url}");
    byte[] fileContent = Files.readAllBytes(receipt.toPath());
    // try-with-resources: the original never closed the stream, leaking it.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length()).getFinalResult()
            .forEach(recognizedReceipt -> {
                Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
                // Fields are optional: each well-known key may be absent from the map.
                FormField<?> merchantNameField = recognizedFields.get("MerchantName");
                if (merchantNameField != null) {
                    if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                        String merchantName = FieldValueType.STRING.cast(merchantNameField);
                        System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                            merchantName, merchantNameField.getConfidence());
                    }
                }
                FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
                if (merchantPhoneNumberField != null) {
                    // Fix copy-paste bug: the original checked merchantNameField's type here.
                    if (FieldValueType.PHONE_NUMBER.equals(merchantPhoneNumberField.getValueType())) {
                        String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                        System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                            merchantAddress, merchantPhoneNumberField.getConfidence());
                    }
                }
                FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
                if (transactionDateField != null) {
                    if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                        LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                        System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                            transactionDate, transactionDateField.getConfidence());
                    }
                }
                // "Items" is a LIST of MAP fields; each map holds one line item's sub-fields.
                FormField<?> receiptItemsField = recognizedFields.get("Items");
                if (receiptItemsField != null) {
                    System.out.printf("Receipt Items: %n");
                    if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                        List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                        receiptItems.stream()
                            .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                            .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                            .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        // %f, not %d: quantity is a Float; %d throws
                                        // IllegalFormatConversionException at runtime.
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            }));
                    }
                }
            });
    }
}
/**
* Code snippet for {@link FormRecognizerClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
 * Code snippet for creating a {@link FormRecognizerClient} with an API key
 * credential and service endpoint. The {@code "{key}"} and {@code "{endpoint}"}
 * strings are placeholders to be replaced with real values.
 */
public void createFormRecognizerClient() {
// NOTE: this local intentionally shadows the class field of the same name
// (snippet style); callers would normally keep the built client.
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
 * Code snippet for creating a {@link FormRecognizerClient} with a custom
 * {@code HttpPipeline}, allowing callers to supply their own policies
 * (retry, logging, etc.) in addition to credential and endpoint.
 */
public void createFormRecognizerClientWithPipeline() {
// Build the pipeline first; add policies where the comment indicates.
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
 * Code snippet for
 * {@code FormRecognizerClient#beginRecognizeCustomFormsFromUrl(String, String)}.
 * Recognizes a form at a URL with a custom trained model and prints each
 * recognized field's text, value, and confidence score.
 */
public void beginRecognizeCustomFormsFromUrl() {
    String formUrl = "{form_url}";
    String modelId = "{custom_trained_model_id}";
    for (RecognizedForm recognizedForm
        : formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()) {
        recognizedForm.getFields().forEach((fieldText, formField) -> {
            System.out.printf("Field text: %s%n", fieldText);
            System.out.printf("Field value: %s%n", formField.getValue());
            System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
        });
    }
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String analyzeFilePath = "{file_source_url}";
String modelId = "{model_id}";
boolean includeFieldElements = true;
formRecognizerClient.beginRecognizeCustomFormsFromUrl(analyzeFilePath, modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeCustomForms}.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeCustomForms() throws IOException {
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // try-with-resources closes the stream once recognition completes.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId)
            .getFinalResult()
            .forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, formField) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", formField.getValue());
                System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
            }));
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeCustomForms} with options.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeCustomFormsWithOptions() throws IOException {
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    boolean includeFieldElements = true;
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // Stream is closed automatically after the long-running operation finishes.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        RecognizeOptions options = new RecognizeOptions()
            .setContentType(FormContentType.IMAGE_JPEG)
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(10));
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId, options)
            .getFinalResult()
            .forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, formField) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", formField.getValue());
                System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
            }));
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeContentFromUrl(String)}.
 */
public void beginRecognizeContentFromUrl() {
    String formUrl = "{form_url}";
    formRecognizerClient.beginRecognizeContentFromUrl(formUrl)
        .getFinalResult()
        .forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            // Walk every cell of every table on the page.
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(tableCell ->
                    System.out.printf("%s ", tableCell.getText())));
        });
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeContentFromUrl} with options.
 */
public void beginRecognizeContentFromUrlWithOptions() {
    String formPath = "{file_source_url}";
    // A shorter poll interval makes the LRO check for completion more often.
    RecognizeOptions options = new RecognizeOptions()
        .setPollInterval(Duration.ofSeconds(5));
    formRecognizerClient.beginRecognizeContentFromUrl(formPath, options)
        .getFinalResult()
        .forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(tableCell ->
                    System.out.printf("%s ", tableCell.getText())));
        });
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeContent}.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeContent() throws IOException {
    File form = new File("{local/file_path/fileName.pdf}");
    byte[] fileContent = Files.readAllBytes(form.toPath());
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        for (FormPage formPage : formRecognizerClient.beginRecognizeContent(targetStream, form.length())
            .getFinalResult()) {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            // Print every cell of every table found on this page.
            formPage.getTables().forEach(formTable ->
                formTable.getCells().forEach(tableCell ->
                    System.out.printf("%s ", tableCell.getText())));
        }
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeContent} with options.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeContentWithOptions() throws IOException {
    File form = new File("{file_source_url}");
    byte[] fileContent = Files.readAllBytes(form.toPath());
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeContent(targetStream, form.length(),
            new RecognizeOptions()
                .setPollInterval(Duration.ofSeconds(5)))
            .getFinalResult()
            .forEach(formPage -> {
                System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
                System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
                System.out.println("Recognized Tables: ");
                formPage.getTables().forEach(formTable ->
                    formTable.getCells().forEach(tableCell ->
                        System.out.printf("%s ", tableCell.getText())));
            });
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeReceiptsFromUrl(String)}.
 */
public void beginRecognizeReceiptsFromUrl() {
    String receiptUrl = "{file_source_url}";
    for (RecognizedForm recognizedReceipt
        : formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()) {
        Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
        // Every well-known receipt field is optional; check presence and value type before casting.
        FormField<?> merchantNameField = recognizedFields.get("MerchantName");
        if (merchantNameField != null) {
            if (FieldValueType.STRING == merchantNameField.getValueType()) {
                String merchantName = FieldValueType.STRING.cast(merchantNameField);
                System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                    merchantName, merchantNameField.getConfidence());
            }
        }
        FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
        if (merchantPhoneNumberField != null) {
            if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                    merchantAddress, merchantPhoneNumberField.getConfidence());
            }
        }
        FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
        if (transactionDateField != null) {
            if (FieldValueType.DATE == transactionDateField.getValueType()) {
                LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                    transactionDate, transactionDateField.getConfidence());
            }
        }
        FormField<?> receiptItemsField = recognizedFields.get("Items");
        if (receiptItemsField != null) {
            System.out.printf("Receipt Items: %n");
            if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                // Each item is a map of sub-fields; only "Quantity" is printed here.
                receiptItems.stream()
                    .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                    .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                    .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                        if ("Quantity".equals(key)) {
                            if (FieldValueType.DOUBLE == formField.getValueType()) {
                                Float quantity = FieldValueType.DOUBLE.cast(formField);
                                System.out.printf("Quantity: %f, confidence: %.2f%n",
                                    quantity, formField.getConfidence());
                            }
                        }
                    }));
            }
        }
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeReceiptsFromUrl} with options.
 */
public void beginRecognizeReceiptsFromUrlWithOptions() {
    String receiptUrl = "{receipt_url}";
    boolean includeFieldElements = true;
    // FIX: this "WithOptions" snippet previously invoked the overload WITHOUT
    // RecognizeOptions, so it demonstrated nothing beyond the plain variant.
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl,
        new RecognizeOptions()
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(5)))
        .getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            // Each well-known receipt field is optional; verify presence and type before casting.
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING == merchantNameField.getValueType()) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE == transactionDateField.getValueType()) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.stream()
                        .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                        .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                        .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                            if ("Quantity".equals(key)) {
                                if (FieldValueType.DOUBLE == formField.getValueType()) {
                                    Float quantity = FieldValueType.DOUBLE.cast(formField);
                                    System.out.printf("Quantity: %f, confidence: %.2f%n",
                                        quantity, formField.getConfidence());
                                }
                            }
                        }));
                }
            }
        });
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeReceipts}.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeReceipts() throws IOException {
    File receipt = new File("{receipt_url}");
    byte[] fileContent = Files.readAllBytes(receipt.toPath());
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        for (RecognizedForm recognizedReceipt
            : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length()).getFinalResult()) {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            // Receipt fields are optional; confirm presence and value type before casting.
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING == merchantNameField.getValueType()) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE == transactionDateField.getValueType()) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    // Each line item is itself a map of sub-fields.
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP == receiptItem.getValueType()) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE == formField.getValueType()) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        }
    }
}
/**
* Code snippet for {@link FormRecognizerClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
I guess I can. Just wanted to keep a method showing the non-stream way but thought that would be inconsistent. So kept all the sample files using `forEach`. I don't have a strong preference, what do you think, update all of them? | public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
// FIX: wrap the stream in try-with-resources — the original never closed it.
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
    for (RecognizedForm recognizedForm : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(),
        new RecognizeOptions()
            .setContentType(FormContentType.IMAGE_JPEG)
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(5)))
        .getFinalResult()) {
        Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
        FormField<?> merchantNameField = recognizedFields.get("MerchantName");
        if (merchantNameField != null) {
            if (FieldValueType.STRING == merchantNameField.getValueType()) {
                String merchantName = FieldValueType.STRING.cast(merchantNameField);
                System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                    merchantName, merchantNameField.getConfidence());
            }
        }
        FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
        if (merchantPhoneNumberField != null) {
            // FIX: the original checked merchantNameField's type here (copy-paste
            // error) — an NPE risk when "MerchantName" is absent and always the
            // wrong guard for this field.
            if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                    merchantAddress, merchantPhoneNumberField.getConfidence());
            }
        }
        FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
        if (transactionDateField != null) {
            if (FieldValueType.DATE == transactionDateField.getValueType()) {
                LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                    transactionDate, transactionDateField.getConfidence());
            }
        }
        FormField<?> receiptItemsField = recognizedFields.get("Items");
        if (receiptItemsField != null) {
            System.out.printf("Receipt Items: %n");
            if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                receiptItems.stream()
                    .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                    .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                    .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                        // '==' for enum comparison, consistent with the rest of the method.
                        if ("Quantity".equals(key)) {
                            if (FieldValueType.DOUBLE == formField.getValueType()) {
                                Float quantity = FieldValueType.DOUBLE.cast(formField);
                                // FIX: '%f', not '%d' — quantity is a Float and '%d'
                                // throws IllegalFormatConversionException at runtime.
                                System.out.printf("Quantity: %f, confidence: %.2f%n",
                                    quantity, formField.getConfidence());
                            }
                        }
                    }));
            }
        }
    }
}
} | if ("Quantity".equals(key)) { | public void beginRecognizeReceiptsWithOptions() throws IOException {
// Read the receipt image fully into memory before handing it to the service.
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
// try-with-resources guarantees the stream is closed even if polling fails.
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
    for (RecognizedForm recognizedForm : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(),
        new RecognizeOptions()
            .setContentType(FormContentType.IMAGE_JPEG)
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(5)))
        .getFinalResult()) {
        Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
        // Each well-known receipt field is optional; check presence and value
        // type before casting.
        FormField<?> merchantNameField = recognizedFields.get("MerchantName");
        if (merchantNameField != null) {
            if (FieldValueType.STRING == merchantNameField.getValueType()) {
                String merchantName = FieldValueType.STRING.cast(merchantNameField);
                System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                    merchantName, merchantNameField.getConfidence());
            }
        }
        FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
        if (merchantPhoneNumberField != null) {
            if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                    merchantAddress, merchantPhoneNumberField.getConfidence());
            }
        }
        FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
        if (transactionDateField != null) {
            if (FieldValueType.DATE == transactionDateField.getValueType()) {
                LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                    transactionDate, transactionDateField.getConfidence());
            }
        }
        FormField<?> receiptItemsField = recognizedFields.get("Items");
        if (receiptItemsField != null) {
            System.out.printf("Receipt Items: %n");
            if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                // Each line item is a map of sub-fields; only "Quantity" is printed.
                receiptItems.stream()
                    .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                    .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                    .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                        if ("Quantity".equals(key)) {
                            if (FieldValueType.DOUBLE == formField.getValueType()) {
                                Float quantity = FieldValueType.DOUBLE.cast(formField);
                                System.out.printf("Quantity: %f, confidence: %.2f%n",
                                    quantity, formField.getConfidence());
                            }
                        }
                    }));
            }
        }
    }
}
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
 * Code snippet for creating a {@link FormRecognizerClient}.
 */
public void createFormRecognizerClient() {
    FormRecognizerClient client = new FormRecognizerClientBuilder()
        .endpoint("{endpoint}")
        .credential(new AzureKeyCredential("{key}"))
        .buildClient();
}
/**
 * Code snippet for creating a {@link FormRecognizerClient} that uses a custom HTTP pipeline.
 */
public void createFormRecognizerClientWithPipeline() {
    // Policies (retry, logging, ...) are supplied when building the pipeline.
    HttpPipeline httpPipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    FormRecognizerClient client = new FormRecognizerClientBuilder()
        .endpoint("{endpoint}")
        .credential(new AzureKeyCredential("{key}"))
        .pipeline(httpPipeline)
        .buildClient();
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeCustomFormsFromUrl(String, String)}.
 */
public void beginRecognizeCustomFormsFromUrl() {
    String formUrl = "{form_url}";
    String modelId = "{custom_trained_model_id}";
    // Print every recognized field of every returned form.
    formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
        .stream()
        .map(RecognizedForm::getFields)
        .forEach(fields -> fields.forEach((fieldText, fieldValue) -> {
            System.out.printf("Field text: %s%n", fieldText);
            System.out.printf("Field value: %s%n", fieldValue.getValue());
            System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
        }));
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeCustomFormsFromUrl} with options.
 */
public void beginRecognizeCustomFormsFromUrlWithOptions() {
    String analyzeFilePath = "{file_source_url}";
    String modelId = "{model_id}";
    boolean includeFieldElements = true;
    RecognizeOptions options = new RecognizeOptions()
        .setContentType(FormContentType.IMAGE_JPEG)
        .setIncludeFieldElements(includeFieldElements)
        .setPollInterval(Duration.ofSeconds(10));
    formRecognizerClient.beginRecognizeCustomFormsFromUrl(analyzeFilePath, modelId, options)
        .getFinalResult()
        .stream()
        .map(RecognizedForm::getFields)
        .forEach(fields -> fields.forEach((fieldText, fieldValue) -> {
            System.out.printf("Field text: %s%n", fieldText);
            System.out.printf("Field value: %s%n", fieldValue.getValue());
            System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
        }));
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeCustomForms}.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeCustomForms() throws IOException {
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // FIX: try-with-resources — the original never closed the InputStream.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId).getFinalResult()
            .forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeCustomForms} with options.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeCustomFormsWithOptions() throws IOException {
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    boolean includeFieldElements = true;
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // FIX: try-with-resources — the original leaked the InputStream.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
            new RecognizeOptions()
                .setContentType(FormContentType.IMAGE_JPEG)
                .setIncludeFieldElements(includeFieldElements)
                .setPollInterval(Duration.ofSeconds(10)))
            .getFinalResult()
            .forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeContentFromUrl(String)}.
 */
public void beginRecognizeContentFromUrl() {
    String formUrl = "{form_url}";
    formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(formPage -> {
        System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
        System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
        System.out.println("Recognized Tables: ");
        // Flatten all tables into their cells and print each cell's text.
        formPage.getTables().stream()
            .flatMap(formTable -> formTable.getCells().stream())
            .forEach(tableCell -> System.out.printf("%s ", tableCell.getText()));
    });
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeContentFromUrl} with options.
 */
public void beginRecognizeContentFromUrlWithOptions() {
    String formPath = "{file_source_url}";
    // Shorten the LRO polling interval to five seconds.
    RecognizeOptions options = new RecognizeOptions()
        .setPollInterval(Duration.ofSeconds(5));
    formRecognizerClient.beginRecognizeContentFromUrl(formPath, options)
        .getFinalResult()
        .forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables().stream()
                .flatMap(formTable -> formTable.getCells().stream())
                .forEach(tableCell -> System.out.printf("%s ", tableCell.getText()));
        });
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeContent}.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeContent() throws IOException {
    File form = new File("{local/file_path/fileName.pdf}");
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // FIX: try-with-resources — the original never closed the InputStream.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeContent(targetStream, form.length()).getFinalResult()
            .forEach(recognizedForm -> {
                System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
                System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
                System.out.println("Recognized Tables: ");
                recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
                    System.out.printf("%s ", recognizedTableCell.getText())));
            });
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeContent} with options.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeContentWithOptions() throws IOException {
    File form = new File("{file_source_url}");
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // FIX: try-with-resources — the original leaked the InputStream.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeContent(targetStream, form.length(),
            new RecognizeOptions()
                .setPollInterval(Duration.ofSeconds(5)))
            .getFinalResult()
            .forEach(recognizedForm -> {
                System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
                System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
                System.out.println("Recognized Tables: ");
                recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
                    System.out.printf("%s ", recognizedTableCell.getText())));
            });
    }
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeReceiptsFromUrl(String)}.
 */
public void beginRecognizeReceiptsFromUrl() {
    String receiptUrl = "{file_source_url}";
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            // Each well-known receipt field is optional; verify presence and type before casting.
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // FIX: previously tested merchantNameField's type (copy-paste error),
                // so this branch keyed off the wrong field entirely.
                if (FieldValueType.PHONE_NUMBER.equals(merchantPhoneNumberField.getValueType())) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        // FIX: '%f', not '%d' — quantity is a Float and '%d'
                                        // throws IllegalFormatConversionException at runtime.
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        });
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeReceiptsFromUrl} with options.
 */
public void beginRecognizeReceiptsFromUrlWithOptions() {
    String receiptUrl = "{receipt_url}";
    boolean includeFieldElements = true;
    // FIX: this "WithOptions" snippet previously called the overload without
    // RecognizeOptions, so it did not demonstrate the options at all.
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl,
        new RecognizeOptions()
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(5)))
        .getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // FIX: previously tested merchantNameField's type (copy-paste error).
                if (FieldValueType.PHONE_NUMBER.equals(merchantPhoneNumberField.getValueType())) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        // FIX: '%f', not '%d' — quantity is a Float.
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        });
}
/**
 * Code snippet for {@link FormRecognizerClient#beginRecognizeReceipts}.
 *
 * @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
 */
public void beginRecognizeReceipts() throws IOException {
    File receipt = new File("{receipt_url}");
    byte[] fileContent = Files.readAllBytes(receipt.toPath());
    // FIX: try-with-resources — the original never closed the InputStream.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length()).getFinalResult()
            .forEach(recognizedReceipt -> {
                Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
                FormField<?> merchantNameField = recognizedFields.get("MerchantName");
                if (merchantNameField != null) {
                    if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                        String merchantName = FieldValueType.STRING.cast(merchantNameField);
                        System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                            merchantName, merchantNameField.getConfidence());
                    }
                }
                FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
                if (merchantPhoneNumberField != null) {
                    // FIX: previously tested merchantNameField's type (copy-paste error),
                    // so the phone-number branch keyed off the wrong field.
                    if (FieldValueType.PHONE_NUMBER.equals(merchantPhoneNumberField.getValueType())) {
                        String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                        System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                            merchantAddress, merchantPhoneNumberField.getConfidence());
                    }
                }
                FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
                if (transactionDateField != null) {
                    if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                        LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                        System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                            transactionDate, transactionDateField.getConfidence());
                    }
                }
                FormField<?> receiptItemsField = recognizedFields.get("Items");
                if (receiptItemsField != null) {
                    System.out.printf("Receipt Items: %n");
                    if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                        List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                        receiptItems.stream()
                            .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                            .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                            .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        // FIX: '%f', not '%d' — quantity is a Float and '%d'
                                        // throws IllegalFormatConversionException at runtime.
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            }));
                    }
                }
            });
    }
}
/**
* Code snippet for {@link FormRecognizerClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
    // Snippet: minimal synchronous client construction from an API key and endpoint.
    FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
    // Snippet: client construction with a caller-supplied HTTP pipeline
    // (lets the caller inject custom policies, e.g. logging or retry).
    HttpPipeline pipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .pipeline(pipeline)
        .buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
    // Snippet: recognize a form at a URL with a custom-trained model,
    // blocking on the poller and printing each recognized field.
    String formUrl = "{form_url}";
    String modelId = "{custom_trained_model_id}";
    formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
        .stream()
        .map(RecognizedForm::getFields)
        .forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
            System.out.printf("Field text: %s%n", fieldText);
            System.out.printf("Field value: %s%n", formField.getValue());
            System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
        }));
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
    // Snippet: same as beginRecognizeCustomFormsFromUrl, but with explicit
    // RecognizeOptions (content type, field elements, poll interval).
    String analyzeFilePath = "{file_source_url}";
    String modelId = "{model_id}";
    boolean includeFieldElements = true;
    formRecognizerClient.beginRecognizeCustomFormsFromUrl(analyzeFilePath, modelId,
        new RecognizeOptions()
            .setContentType(FormContentType.IMAGE_JPEG)
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(10)))
        .getFinalResult()
        .stream()
        .map(RecognizedForm::getFields)
        .forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
            System.out.printf("Field text: %s%n", fieldText);
            System.out.printf("Field value: %s%n", formField.getValue());
            System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
        }));
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
    // Snippet: recognize a local file with a custom-trained model.
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // try-with-resources guarantees the stream is closed after recognition.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId)
            .getFinalResult()
            .stream()
            .map(RecognizedForm::getFields)
            .forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", formField.getValue());
                System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
            }));
    }
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
    // Snippet: recognize a local file with a custom model and explicit options.
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    boolean includeFieldElements = true;
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // try-with-resources guarantees the stream is closed after recognition.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
            new RecognizeOptions()
                .setContentType(FormContentType.IMAGE_JPEG)
                .setIncludeFieldElements(includeFieldElements)
                .setPollInterval(Duration.ofSeconds(10)))
            .getFinalResult()
            .stream()
            .map(RecognizedForm::getFields)
            .forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", formField.getValue());
                System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
            }));
    }
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
    // Snippet: layout/content recognition from a URL; prints page metadata
    // and the text of every recognized table cell.
    String formUrl = "{form_url}";
    formRecognizerClient.beginRecognizeContentFromUrl(formUrl)
        .getFinalResult()
        .forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables()
                .stream()
                .flatMap(formTable -> formTable.getCells().stream())
                .forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
        });
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*/
public void beginRecognizeContentFromUrlWithOptions() {
    // Snippet: content recognition from a URL with a custom poll interval.
    String formPath = "{file_source_url}";
    formRecognizerClient.beginRecognizeContentFromUrl(formPath,
        new RecognizeOptions()
            .setPollInterval(Duration.ofSeconds(5)))
        .getFinalResult()
        .forEach(formPage -> {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables()
                .stream()
                .flatMap(formTable -> formTable.getCells().stream())
                .forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
        });
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
    // Snippet: content recognition from a local file.
    File form = new File("{local/file_path/fileName.pdf}");
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // try-with-resources guarantees the stream is closed after recognition.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeContent(targetStream, form.length())
            .getFinalResult()
            .forEach(formPage -> {
                System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
                System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
                System.out.println("Recognized Tables: ");
                formPage.getTables()
                    .stream()
                    .flatMap(formTable -> formTable.getCells().stream())
                    .forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
            });
    }
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
    // Snippet: content recognition from a local file with a custom poll interval.
    File form = new File("{file_source_url}");
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // try-with-resources guarantees the stream is closed after recognition.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        for (FormPage formPage : formRecognizerClient.beginRecognizeContent(targetStream, form.length(),
            new RecognizeOptions()
                .setPollInterval(Duration.ofSeconds(5)))
            .getFinalResult()) {
            System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
            System.out.println("Recognized Tables: ");
            formPage.getTables()
                .stream()
                .flatMap(formTable -> formTable.getCells().stream())
                .forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
        }
    }
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
    // Snippet: receipt recognition from a URL. Each well-known field is looked up
    // by name, type-checked against the expected FieldValueType, then cast and printed.
    String receiptUrl = "{file_source_url}";
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl)
        .getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING == merchantNameField.getValueType()) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE == transactionDateField.getValueType()) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            // "Items" is a LIST of MAP fields; drill into each item's "Quantity".
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP == receiptItem.getValueType()) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE == formField.getValueType()) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        });
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
    // Snippet: receipt recognition from a URL.
    // NOTE(review): despite the "WithOptions" name, no RecognizeOptions is passed
    // to the call below — confirm whether an options overload was intended here.
    String receiptUrl = "{receipt_url}";
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING == merchantNameField.getValueType()) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE == transactionDateField.getValueType()) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            // "Items" is a LIST of MAP fields; drill into each item's "Quantity".
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.stream()
                        .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                        .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                        .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                            if ("Quantity".equals(key)) {
                                if (FieldValueType.DOUBLE == formField.getValueType()) {
                                    Float quantity = FieldValueType.DOUBLE.cast(formField);
                                    System.out.printf("Quantity: %f, confidence: %.2f%n",
                                        quantity, formField.getConfidence());
                                }
                            }
                        }));
                }
            }
        });
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
    // Snippet: receipt recognition from a local file; the stream is closed by
    // try-with-resources, and each well-known field is type-checked before casting.
    File receipt = new File("{receipt_url}");
    byte[] fileContent = Files.readAllBytes(receipt.toPath());
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length()).getFinalResult()
            .forEach(recognizedReceipt -> {
                Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
                FormField<?> merchantNameField = recognizedFields.get("MerchantName");
                if (merchantNameField != null) {
                    if (FieldValueType.STRING == merchantNameField.getValueType()) {
                        String merchantName = FieldValueType.STRING.cast(merchantNameField);
                        System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                            merchantName, merchantNameField.getConfidence());
                    }
                }
                FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
                if (merchantPhoneNumberField != null) {
                    if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
                        String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                        System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                            merchantAddress, merchantPhoneNumberField.getConfidence());
                    }
                }
                FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
                if (transactionDateField != null) {
                    if (FieldValueType.DATE == transactionDateField.getValueType()) {
                        LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                        System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                            transactionDate, transactionDateField.getConfidence());
                    }
                }
                // "Items" is a LIST of MAP fields; drill into each item's "Quantity".
                FormField<?> receiptItemsField = recognizedFields.get("Items");
                if (receiptItemsField != null) {
                    System.out.printf("Receipt Items: %n");
                    if (FieldValueType.LIST == receiptItemsField.getValueType()) {
                        List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                        receiptItems.stream()
                            .filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
                            .<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
                            .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE == formField.getValueType()) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            }));
                    }
                }
            });
    }
}
/**
* Code snippet for {@link FormRecognizerClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
}
Personally I like to using stream() if possible. | public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
for (RecognizedForm recognizedForm : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()) {
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantNameField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %d, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
} | if ("Quantity".equals(key)) { | public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(receipt.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
for (RecognizedForm recognizedForm : formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()) {
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
}
}

class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
    // Snippet: minimal synchronous client construction from an API key and endpoint.
    FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
    // Snippet: client construction with a caller-supplied HTTP pipeline.
    HttpPipeline pipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .pipeline(pipeline)
        .buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
    // Snippet: recognize a form at a URL with a custom-trained model and print each field.
    String formUrl = "{form_url}";
    String modelId = "{custom_trained_model_id}";
    formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
        .forEach(recognizedForm ->
            recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
    // Snippet: URL recognition with explicit RecognizeOptions
    // (content type, field elements, poll interval).
    String analyzeFilePath = "{file_source_url}";
    String modelId = "{model_id}";
    boolean includeFieldElements = true;
    formRecognizerClient.beginRecognizeCustomFormsFromUrl(analyzeFilePath, modelId,
        new RecognizeOptions()
            .setContentType(FormContentType.IMAGE_JPEG)
            .setIncludeFieldElements(includeFieldElements)
            .setPollInterval(Duration.ofSeconds(10)))
        .getFinalResult()
        .forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
            System.out.printf("Field text: %s%n", fieldText);
            System.out.printf("Field value: %s%n", fieldValue.getValue());
            System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
        }));
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
// Recognizes a local file with a custom-trained model and prints each field.
public void beginRecognizeCustomForms() throws IOException {
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // Fix: the InputStream was previously never closed; try-with-resources closes it.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId).getFinalResult()
            .forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
    }
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
// Recognizes a local file with a custom model and explicit RecognizeOptions.
public void beginRecognizeCustomFormsWithOptions() throws IOException {
    File form = new File("{local/file_path/fileName.jpg}");
    String modelId = "{custom_trained_model_id}";
    boolean includeFieldElements = true;
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // Fix: the InputStream was previously never closed; try-with-resources closes it.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
            new RecognizeOptions()
                .setContentType(FormContentType.IMAGE_JPEG)
                .setIncludeFieldElements(includeFieldElements)
                .setPollInterval(Duration.ofSeconds(10)))
            .getFinalResult()
            .forEach(recognizedForm -> recognizedForm.getFields().forEach((fieldText, fieldValue) -> {
                System.out.printf("Field text: %s%n", fieldText);
                System.out.printf("Field value: %s%n", fieldValue.getValue());
                System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
            }));
    }
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
    // Snippet: layout/content recognition from a URL; prints page metadata
    // and the text of every recognized table cell.
    String formUrl = "{form_url}";
    formRecognizerClient.beginRecognizeContentFromUrl(formUrl).getFinalResult().forEach(recognizedForm -> {
        System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
        System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
        System.out.println("Recognized Tables: ");
        recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
            System.out.printf("%s ", recognizedTableCell.getText())));
    });
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*/
public void beginRecognizeContentFromUrlWithOptions() {
    // Snippet: content recognition from a URL with a custom poll interval.
    String formPath = "{file_source_url}";
    formRecognizerClient.beginRecognizeContentFromUrl(formPath,
        new RecognizeOptions()
            .setPollInterval(Duration.ofSeconds(5)))
        .getFinalResult()
        .forEach(recognizedForm -> {
            System.out.printf("Page Angle: %s%n", recognizedForm.getTextAngle());
            System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
            System.out.println("Recognized Tables: ");
            recognizedForm.getTables().forEach(formTable ->
                formTable.getCells().forEach(recognizedTableCell ->
                    System.out.printf("%s ", recognizedTableCell.getText())));
        });
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
// Recognizes layout/content from a local file and prints pages and table cells.
public void beginRecognizeContent() throws IOException {
    File form = new File("{local/file_path/fileName.pdf}");
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // Fix: the InputStream was previously never closed; try-with-resources closes it.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeContent(targetStream, form.length()).getFinalResult()
            .forEach(recognizedForm -> {
                System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
                System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
                System.out.println("Recognized Tables: ");
                recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
                    System.out.printf("%s ", recognizedTableCell.getText())));
            });
    }
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
// Recognizes layout/content from a local file with a custom poll interval.
public void beginRecognizeContentWithOptions() throws IOException {
    File form = new File("{file_source_url}");
    byte[] fileContent = Files.readAllBytes(form.toPath());
    // Fix: the InputStream was previously never closed; try-with-resources closes it.
    try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
        formRecognizerClient.beginRecognizeContent(targetStream, form.length(),
            new RecognizeOptions()
                .setPollInterval(Duration.ofSeconds(5)))
            .getFinalResult()
            .forEach(recognizedForm -> {
                System.out.printf("Page Angle: %f%n", recognizedForm.getTextAngle());
                System.out.printf("Page Dimension unit: %s%n", recognizedForm.getUnit());
                System.out.println("Recognized Tables: ");
                recognizedForm.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
                    System.out.printf("%s ", recognizedTableCell.getText())));
            });
    }
}
/**
* Code snippet for {@link FormRecognizerClient
*/
// Recognizes receipt data from a URL and prints the well-known fields.
public void beginRecognizeReceiptsFromUrl() {
    String receiptUrl = "{file_source_url}";
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // Fix: the type check previously inspected merchantNameField,
                // so the cast below could run against the wrong field type.
                if (FieldValueType.PHONE_NUMBER.equals(merchantPhoneNumberField.getValueType())) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        // Fix: %d throws IllegalFormatConversionException for a Float; use %f.
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        });
}
/**
* Code snippet for {@link FormRecognizerClient
*/
// Recognizes receipt data from a URL and prints the well-known fields.
// NOTE(review): despite the "WithOptions" name, no RecognizeOptions is passed
// below — confirm whether an options overload was intended here.
public void beginRecognizeReceiptsFromUrlWithOptions() {
    String receiptUrl = "{receipt_url}";
    formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
        .forEach(recognizedReceipt -> {
            Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
            FormField<?> merchantNameField = recognizedFields.get("MerchantName");
            if (merchantNameField != null) {
                if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
                    String merchantName = FieldValueType.STRING.cast(merchantNameField);
                    System.out.printf("Merchant Name: %s, confidence: %.2f%n",
                        merchantName, merchantNameField.getConfidence());
                }
            }
            FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
            if (merchantPhoneNumberField != null) {
                // Fix: the type check previously inspected merchantNameField,
                // so the cast below could run against the wrong field type.
                if (FieldValueType.PHONE_NUMBER.equals(merchantPhoneNumberField.getValueType())) {
                    String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
                    System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
                        merchantAddress, merchantPhoneNumberField.getConfidence());
                }
            }
            FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
            if (transactionDateField != null) {
                if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
                    LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
                    System.out.printf("Transaction Date: %s, confidence: %.2f%n",
                        transactionDate, transactionDateField.getConfidence());
                }
            }
            FormField<?> receiptItemsField = recognizedFields.get("Items");
            if (receiptItemsField != null) {
                System.out.printf("Receipt Items: %n");
                if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
                    List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
                    receiptItems.forEach(receiptItem -> {
                        if (FieldValueType.MAP.equals(receiptItem.getValueType())) {
                            Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
                            formFieldMap.forEach((key, formField) -> {
                                if ("Quantity".equals(key)) {
                                    if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
                                        Float quantity = FieldValueType.DOUBLE.cast(formField);
                                        // Fix: %d throws IllegalFormatConversionException for a Float; use %f.
                                        System.out.printf("Quantity: %f, confidence: %.2f%n",
                                            quantity, formField.getConfidence());
                                    }
                                }
                            });
                        }
                    });
                }
            }
        });
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{receipt_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length()).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING.equals(merchantNameField.getValueType())) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER.equals(merchantNameField.getValueType())) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE.equals(transactionDateField.getValueType())) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST.equals(receiptItemsField.getValueType())) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE.equals(formField.getValueType())) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %d, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class FormRecognizerClientJavaDocCodeSnippets {
private FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder().buildClient();
/**
* Code snippet for creating a {@link FormRecognizerClient}
*/
public void createFormRecognizerClient() {
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for creating a {@link FormRecognizerClient} with pipeline
*/
public void createFormRecognizerClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerClient formRecognizerClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId).getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String analyzeFilePath = "{file_source_url}";
String modelId = "{model_id}";
boolean includeFieldElements = true;
formRecognizerClient.beginRecognizeCustomFormsFromUrl(analyzeFilePath, modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
byte[] fileContent = Files.readAllBytes(form.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId)
.getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
}
/**
* Code snippet for
* {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
byte[] fileContent = Files.readAllBytes(form.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
formRecognizerClient.beginRecognizeCustomForms(targetStream, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(10)))
.getFinalResult()
.stream()
.map(RecognizedForm::getFields)
.forEach(formFieldMap -> formFieldMap.forEach((fieldText, formField) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", formField.getValue());
System.out.printf("Confidence score: %.2f%n", formField.getConfidence());
}));
}
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{form_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formUrl)
.getFinalResult()
.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables()
.stream()
.flatMap(formTable -> formTable.getCells().stream())
.forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
});
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formPath = "{file_source_url}";
formRecognizerClient.beginRecognizeContentFromUrl(formPath,
new RecognizeOptions()
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()
.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables()
.stream()
.flatMap(formTable -> formTable.getCells().stream())
.forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.pdf}");
byte[] fileContent = Files.readAllBytes(form.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
formRecognizerClient.beginRecognizeContent(targetStream, form.length())
.getFinalResult()
.forEach(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables()
.stream()
.flatMap(formTable -> formTable.getCells().stream())
.forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
});
}
}
/**
* Code snippet for {@link FormRecognizerClient
* options.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{file_source_url}");
byte[] fileContent = Files.readAllBytes(form.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
for (FormPage formPage : formRecognizerClient.beginRecognizeContent(targetStream, form.length(),
new RecognizeOptions()
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult()) {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables()
.stream()
.flatMap(formTable -> formTable.getCells().stream())
.forEach(recognizedTableCell -> System.out.printf("%s ", recognizedTableCell.getText()));
}
}
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{file_source_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.forEach(receiptItem -> {
if (FieldValueType.MAP == receiptItem.getValueType()) {
Map<String, FormField<?>> formFieldMap = FieldValueType.MAP.cast(receiptItem);
formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
});
}
});
}
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receipt_url}";
formRecognizerClient.beginRecognizeReceiptsFromUrl(receiptUrl).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
});
}
/**
* Code snippet for {@link FormRecognizerClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{receipt_url}");
byte[] fileContent = Files.readAllBytes(receipt.toPath());
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
formRecognizerClient.beginRecognizeReceipts(targetStream, receipt.length()).getFinalResult()
.forEach(recognizedReceipt -> {
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
});
}
}
/**
* Code snippet for {@link FormRecognizerClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
good catch | public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
} | if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) { | public void beginRecognizeReceiptsFromUrl() {
String receiptUrl = "{receiptUrl}";
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl)
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
} | class FormRecognizerAsyncClientJavaDocCodeSnippets {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder().buildAsyncClient();
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient}
*/
public void createFormRecognizerAsyncClient() {
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormRecognizerAsyncClient} with pipeline
*/
public void createFormRecognizerAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormRecognizerAsyncClient formRecognizerAsyncClient = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrl() {
String formUrl = "{form_url}";
String modelId = "{custom_trained_model_id}";
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
*/
public void beginRecognizeCustomFormsFromUrlWithOptions() {
String formUrl = "{formUrl}";
String modelId = "{model_id}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(10)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomForms() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for
* {@link FormRecognizerAsyncClient
* with options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeCustomFormsWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
String modelId = "{custom_trained_model_id}";
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeCustomForms(buffer, form.length(), modelId,
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(recognizedForm -> recognizedForm.getFields()
.forEach((fieldText, fieldValue) -> {
System.out.printf("Field text: %s%n", fieldText);
System.out.printf("Field value: %s%n", fieldValue.getValue());
System.out.printf("Confidence score: %.2f%n", fieldValue.getConfidence());
}));
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeContentFromUrl() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl)
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*/
public void beginRecognizeContentFromUrlWithOptions() {
String formUrl = "{formUrl}";
formRecognizerAsyncClient.beginRecognizeContentFromUrl(formUrl,
new RecognizeOptions().setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContent() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length())
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable ->
formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeContentWithOptions() throws IOException {
File form = new File("{local/file_path/fileName.jpg}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(form.toPath())));
formRecognizerAsyncClient.beginRecognizeContent(buffer, form.length(),
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.flatMap(Flux::fromIterable)
.subscribe(formPage -> {
System.out.printf("Page Angle: %s%n", formPage.getTextAngle());
System.out.printf("Page Dimension unit: %s%n", formPage.getUnit());
System.out.println("Recognized Tables: ");
formPage.getTables().forEach(formTable -> formTable.getCells().forEach(recognizedTableCell ->
System.out.printf("%s ", recognizedTableCell.getText())));
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
/**
* Code snippet for {@link FormRecognizerAsyncClient
*/
public void beginRecognizeReceiptsFromUrlWithOptions() {
String receiptUrl = "{receiptUrl}";
boolean includeTextContent = true;
formRecognizerAsyncClient.beginRecognizeReceiptsFromUrl(receiptUrl,
new RecognizeOptions()
.setIncludeFieldElements(includeTextContent)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedReceipt = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedReceipt.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceipts() throws IOException {
File receipt = new File("{file_source_url}");
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length())
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
/**
* Code snippet for {@link FormRecognizerAsyncClient
* options
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
public void beginRecognizeReceiptsWithOptions() throws IOException {
File receipt = new File("{local/file_path/fileName.jpg}");
boolean includeFieldElements = true;
Flux<ByteBuffer> buffer = toFluxByteBuffer(new ByteArrayInputStream(Files.readAllBytes(receipt.toPath())));
formRecognizerAsyncClient.beginRecognizeReceipts(buffer, receipt.length(),
new RecognizeOptions()
.setContentType(FormContentType.IMAGE_JPEG)
.setIncludeFieldElements(includeFieldElements)
.setPollInterval(Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(recognizedReceipts -> {
for (int i = 0; i < recognizedReceipts.size(); i++) {
RecognizedForm recognizedForm = recognizedReceipts.get(i);
Map<String, FormField<?>> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized Receipt page %d -----------%n", i);
FormField<?> merchantNameField = recognizedFields.get("MerchantName");
if (merchantNameField != null) {
if (FieldValueType.STRING == merchantNameField.getValueType()) {
String merchantName = FieldValueType.STRING.cast(merchantNameField);
System.out.printf("Merchant Name: %s, confidence: %.2f%n",
merchantName, merchantNameField.getConfidence());
}
}
FormField<?> merchantPhoneNumberField = recognizedFields.get("MerchantPhoneNumber");
if (merchantPhoneNumberField != null) {
if (FieldValueType.PHONE_NUMBER == merchantPhoneNumberField.getValueType()) {
String merchantAddress = FieldValueType.PHONE_NUMBER.cast(merchantPhoneNumberField);
System.out.printf("Merchant Phone number: %s, confidence: %.2f%n",
merchantAddress, merchantPhoneNumberField.getConfidence());
}
}
FormField<?> transactionDateField = recognizedFields.get("TransactionDate");
if (transactionDateField != null) {
if (FieldValueType.DATE == transactionDateField.getValueType()) {
LocalDate transactionDate = FieldValueType.DATE.cast(transactionDateField);
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
transactionDate, transactionDateField.getConfidence());
}
}
FormField<?> receiptItemsField = recognizedFields.get("Items");
if (receiptItemsField != null) {
System.out.printf("Receipt Items: %n");
if (FieldValueType.LIST == receiptItemsField.getValueType()) {
List<FormField<?>> receiptItems = FieldValueType.LIST.cast(receiptItemsField);
receiptItems.stream()
.filter(receiptItem -> FieldValueType.MAP == receiptItem.getValueType())
.<Map<String, FormField<?>>>map(FieldValueType.MAP::cast)
.forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> {
if ("Quantity".equals(key)) {
if (FieldValueType.DOUBLE == formField.getValueType()) {
Float quantity = FieldValueType.DOUBLE.cast(formField);
System.out.printf("Quantity: %f, confidence: %.2f%n",
quantity, formField.getConfidence());
}
}
}));
}
}
}
});
}
} |
NIT: you are using two space here, but should be one. | public static void main(String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
List<RecognizedForm> formsWithLabeledModel =
client.beginRecognizeCustomForms(
new FileInputStream(analyzeFile), analyzeFile.length(),
"{labeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult();
List<RecognizedForm> formsWithUnlabeledModel =
client.beginRecognizeCustomForms(new FileInputStream(analyzeFile), analyzeFile.length(),
"{unlabeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult();
System.out.println("--------Recognizing forms with labeled custom model--------");
formsWithLabeledModel.forEach(labeledForm -> labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "MerchantName".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
}));
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
formsWithUnlabeledModel.forEach(unLabeledForm -> unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), "", formField.getConfidence());
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry -> System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
}));
} | System.out.printf("Field %s has label %s within bounding box %s with a confidence score " | public static void main(String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
List<RecognizedForm> formsWithLabeledModel =
client.beginRecognizeCustomForms(
new FileInputStream(analyzeFile), analyzeFile.length(),
"{labeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult();
List<RecognizedForm> formsWithUnlabeledModel =
client.beginRecognizeCustomForms(new FileInputStream(analyzeFile), analyzeFile.length(),
"{unlabeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)))
.getFinalResult();
System.out.println("--------Recognizing forms with labeled custom model--------");
formsWithLabeledModel.forEach(labeledForm -> labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "MerchantName".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
}));
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
formsWithUnlabeledModel.forEach(unLabeledForm -> unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), "", formField.getConfidence());
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry -> System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
}));
} | class AdvancedDiffLabeledUnlabeledData {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class AdvancedDiffLabeledUnlabeledData {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
nit: same here. extra space | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
byte[] fileContent = Files.readAllBytes(analyzeFile.toPath());
PollerFlux<OperationResult, List<RecognizedForm>> labeledCustomFormPoller =
client.beginRecognizeCustomForms(toFluxByteBuffer(new ByteArrayInputStream(fileContent)),
analyzeFile.length(), "{labeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)));
PollerFlux<OperationResult, List<RecognizedForm>> unlabeledCustomFormPoller =
client.beginRecognizeCustomForms(toFluxByteBuffer(new ByteArrayInputStream(fileContent)),
analyzeFile.length(), "{unlabeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)));
Mono<List<RecognizedForm>> labeledDataResult = labeledCustomFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
Mono<List<RecognizedForm>> unlabeledDataResult = unlabeledCustomFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
System.out.println("--------Recognizing forms with labeled custom model--------");
labeledDataResult.subscribe(formsWithLabeledModel -> formsWithLabeledModel.forEach(labeledForm ->
labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point -> String.format("[%.2f,"
+ " %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Merchant".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
})));
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
unlabeledDataResult.subscribe(recognizedForms -> recognizedForms.forEach(unLabeledForm ->
unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), "", formField.getConfidence());
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence "
+ "score of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
})));
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | System.out.printf("Field %s has label %s within bounding box %s with a confidence score " | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
byte[] fileContent = Files.readAllBytes(analyzeFile.toPath());
PollerFlux<OperationResult, List<RecognizedForm>> labeledCustomFormPoller =
client.beginRecognizeCustomForms(toFluxByteBuffer(new ByteArrayInputStream(fileContent)),
analyzeFile.length(), "{labeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)));
PollerFlux<OperationResult, List<RecognizedForm>> unlabeledCustomFormPoller =
client.beginRecognizeCustomForms(toFluxByteBuffer(new ByteArrayInputStream(fileContent)),
analyzeFile.length(), "{unlabeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)));
Mono<List<RecognizedForm>> labeledDataResult = labeledCustomFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
Mono<List<RecognizedForm>> unlabeledDataResult = unlabeledCustomFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
System.out.println("--------Recognizing forms with labeled custom model--------");
labeledDataResult.subscribe(formsWithLabeledModel -> formsWithLabeledModel.forEach(labeledForm ->
labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point -> String.format("[%.2f,"
+ " %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Merchant".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
})));
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
unlabeledDataResult.subscribe(recognizedForms -> recognizedForms.forEach(unLabeledForm ->
unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), "", formField.getConfidence());
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence "
+ "score of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
})));
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class AdvancedDiffLabeledUnlabeledDataAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class AdvancedDiffLabeledUnlabeledDataAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
Like the other changes, should this also be converted to flatMap()? | public void beginTraining() {
String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}";
boolean useTrainingLabels = true;
formTrainingAsyncClient.beginTraining(trainingFilesUrl, useTrainingLabels)
.subscribe(trainingPollingOperation -> {
trainingPollingOperation.getFinalResult().subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
});
} | .subscribe(trainingPollingOperation -> { | public void beginTraining() {
String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}";
boolean useTrainingLabels = true;
formTrainingAsyncClient.beginTraining(trainingFilesUrl, useTrainingLabels)
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
} | class FormTrainingAsyncClientJavaDocCodeSnippets {
private FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder().buildAsyncClient();
/**
* Code snippet for {@link FormTrainingAsyncClient} initialization
*/
public void formTrainingAsyncClientInInitialization() {
// Snippet: build an async Form Training client using only the builder defaults.
FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder().buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormTrainingAsyncClient} with pipeline
*/
public void createFormTrainingAsyncClientWithPipeline() {
// Snippet: build a client with an explicit HTTP pipeline so callers can inject
// custom policies (retry, logging, etc.) into every request.
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
/**
* Code snippet for {@link FormTrainingAsyncClient
* with options
*/
public void beginTrainingWithOptions() {
    // Snippet: start training a custom model with a file filter and a custom poll interval.
    String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}";
    TrainingFileFilter trainingFileFilter = new TrainingFileFilter().setIncludeSubFolders(true).setPrefix("Invoice");
    // Chain the poller's final result with flatMap instead of nesting a second
    // subscribe inside the first; this keeps a single reactive pipeline, matching
    // the style used by the other begin* snippets in this class.
    formTrainingAsyncClient.beginTraining(trainingFilesUrl, true, trainingFileFilter,
        Duration.ofSeconds(5))
        .flatMap(AsyncPollResponse::getFinalResult)
        .subscribe(customFormModel -> {
            System.out.printf("Model Id: %s%n", customFormModel.getModelId());
            System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
            customFormModel.getSubmodels().forEach(customFormSubmodel ->
                customFormSubmodel.getFields().forEach((key, customFormModelField) ->
                    System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
                        key, customFormModelField.getName(), customFormModelField.getAccuracy())));
        });
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCustomModel() {
    // Snippet: look up a trained custom model by id and print its status and
    // per-field accuracy for every submodel.
    String modelId = "{model_id}";
    formTrainingAsyncClient.getCustomModel(modelId).subscribe(model -> {
        System.out.printf("Model Id: %s%n", model.getModelId());
        System.out.printf("Model Status: %s%n", model.getModelStatus());
        model.getSubmodels().forEach(submodel ->
            submodel.getFields().forEach((fieldName, field) ->
                System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
                    fieldName, field.getName(), field.getAccuracy())));
    });
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCustomModelWithResponse() {
    // Snippet: look up a custom model together with the raw HTTP response, then
    // print the status code followed by the model's details.
    String modelId = "{model_id}";
    formTrainingAsyncClient.getCustomModelWithResponse(modelId).subscribe(response -> {
        System.out.printf("Response Status Code: %d.", response.getStatusCode());
        CustomFormModel model = response.getValue();
        System.out.printf("Model Id: %s%n", model.getModelId());
        System.out.printf("Model Status: %s%n", model.getModelStatus());
        model.getSubmodels().forEach(submodel ->
            submodel.getFields().forEach((fieldName, field) ->
                System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
                    fieldName, field.getName(), field.getAccuracy())));
    });
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getAccountProperties() {
    // Snippet: print the account's custom-model quota and how many models are
    // currently trained.
    formTrainingAsyncClient.getAccountProperties().subscribe(properties -> {
        System.out.printf("Max number of models that can be trained for this account: %d%n",
            properties.getCustomModelLimit());
        System.out.printf("Current count of trained custom models: %d%n",
            properties.getCustomModelCount());
    });
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getAccountPropertiesWithResponse() {
    // Snippet: fetch account properties along with the raw HTTP response, then
    // print the status code and the quota/usage figures.
    formTrainingAsyncClient.getAccountPropertiesWithResponse().subscribe(response -> {
        System.out.printf("Response Status Code: %d.", response.getStatusCode());
        AccountProperties properties = response.getValue();
        System.out.printf("Max number of models that can be trained for this account: %d%n",
            properties.getCustomModelLimit());
        System.out.printf("Current count of trained custom models: %d%n",
            properties.getCustomModelCount());
    });
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void deleteModel() {
// The id of the model to remove from the account.
String modelId = "{model_id}";
formTrainingAsyncClient.deleteModel(modelId)
// The emitted value is unused (named `ignored`); the subscriber only logs completion.
.subscribe(ignored -> System.out.printf("Model Id: %s is deleted%n", modelId));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void deleteModelWithResponse() {
// The id of the model to remove from the account.
String modelId = "{model_id}";
formTrainingAsyncClient.deleteModelWithResponse(modelId)
// Response-returning variant: print the HTTP status code alongside the deletion log.
.subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
System.out.printf("Model Id: %s is deleted.%n", modelId);
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void listCustomModels() {
    // Snippet: enumerate every custom model in the account and print its summary.
    formTrainingAsyncClient.listCustomModels().subscribe(model ->
        System.out.printf("Model Id: %s, Model status: %s, Created on: %s, Last updated on: %s.%n",
            model.getModelId(), model.getStatus(),
            model.getTrainingStartedOn(), model.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void beginCopy() {
// Snippet: copy a model into another Form Recognizer resource.
// Target resource the model will be copied into.
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
String copyModelId = "copy-model-Id";
// First obtain an authorization from the target resource, then start the copy
// long-running operation, then wait for its final result — all in one chain.
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.flatMapMany(copyAuthorization -> formTrainingAsyncClient.beginCopyModel(copyModelId, copyAuthorization))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModelInfo ->
System.out.printf("Copied model has model Id: %s, model status: %s, training started on: %s,"
+ " training completed on: %s.%n",
customFormModelInfo.getModelId(),
customFormModelInfo.getStatus(),
customFormModelInfo.getTrainingStartedOn(),
customFormModelInfo.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void beginCopyOverload() {
// Snippet: same copy flow as beginCopy, using the overload that takes an
// explicit poll interval.
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
String copyModelId = "copy-model-Id";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.flatMapMany(copyAuthorization -> formTrainingAsyncClient.beginCopyModel(copyModelId, copyAuthorization,
Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModelInfo ->
System.out.printf("Copied model has model Id: %s, model status: %s, training started on: %s,"
+ "training completed on: %s.%n",
customFormModelInfo.getModelId(),
customFormModelInfo.getStatus(),
customFormModelInfo.getTrainingStartedOn(),
customFormModelInfo.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCopyAuthorization() {
    // Snippet: request an authorization for copying a model into the given target
    // resource, then print its fields.
    String resourceId = "target-resource-Id";
    String resourceRegion = "target-resource-region";
    formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
        .subscribe(copyAuthorization -> System.out.printf(
            "Copy Authorization for model id: %s, access token: %s, expiration time: %s, "
                + "target resource Id; %s, target resource region: %s%n",
            copyAuthorization.getModelId(),
            copyAuthorization.getAccessToken(),
            copyAuthorization.getExpiresOn(),
            copyAuthorization.getResourceId(),
            copyAuthorization.getResourceRegion()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCopyAuthorizationWithResponse() {
// Snippet: same as getCopyAuthorization, but also surfaces the raw HTTP
// response so the status code can be printed.
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
formTrainingAsyncClient.getCopyAuthorizationWithResponse(resourceId, resourceRegion)
.subscribe(copyAuthorization ->
System.out.printf("Copy Authorization response status: %s, for model id: %s, access token: %s, "
+ "expiration time: %s, target resource Id; %s, target resource region: %s%n",
copyAuthorization.getStatusCode(),
copyAuthorization.getValue().getModelId(),
copyAuthorization.getValue().getAccessToken(),
copyAuthorization.getValue().getExpiresOn(),
copyAuthorization.getValue().getResourceId(),
copyAuthorization.getValue().getResourceRegion()
));
}
} | class FormTrainingAsyncClientJavaDocCodeSnippets {
private FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder().buildAsyncClient();
/**
* Code snippet for {@link FormTrainingAsyncClient} initialization
*/
public void formTrainingAsyncClientInInitialization() {
FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder().buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormTrainingAsyncClient} with pipeline
*/
public void createFormTrainingAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
/**
* Code snippet for {@link FormTrainingAsyncClient
* with options
*/
public void beginTrainingWithOptions() {
// Snippet: start training a custom model with a file filter and a custom poll interval.
String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}";
TrainingFileFilter trainingFileFilter = new TrainingFileFilter().setIncludeSubFolders(true).setPrefix("Invoice");
// flatMap chains the poller's final result into the same reactive pipeline,
// avoiding a nested subscribe.
formTrainingAsyncClient.beginTraining(trainingFilesUrl, true, trainingFileFilter,
Duration.ofSeconds(5))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels().forEach(customFormSubmodel ->
customFormSubmodel.getFields().forEach((key, customFormModelField) ->
System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCustomModel() {
String modelId = "{model_id}";
formTrainingAsyncClient.getCustomModel(modelId).subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCustomModelWithResponse() {
String modelId = "{model_id}";
formTrainingAsyncClient.getCustomModelWithResponse(modelId).subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
CustomFormModel customFormModel = response.getValue();
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getAccountProperties() {
formTrainingAsyncClient.getAccountProperties()
.subscribe(accountProperties -> {
System.out.printf("Max number of models that can be trained for this account: %d%n",
accountProperties.getCustomModelLimit());
System.out.printf("Current count of trained custom models: %d%n",
accountProperties.getCustomModelCount());
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getAccountPropertiesWithResponse() {
formTrainingAsyncClient.getAccountPropertiesWithResponse()
.subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
AccountProperties accountProperties = response.getValue();
System.out.printf("Max number of models that can be trained for this account: %d%n",
accountProperties.getCustomModelLimit());
System.out.printf("Current count of trained custom models: %d%n",
accountProperties.getCustomModelCount());
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void deleteModel() {
String modelId = "{model_id}";
formTrainingAsyncClient.deleteModel(modelId)
.subscribe(ignored -> System.out.printf("Model Id: %s is deleted%n", modelId));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void deleteModelWithResponse() {
String modelId = "{model_id}";
formTrainingAsyncClient.deleteModelWithResponse(modelId)
.subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
System.out.printf("Model Id: %s is deleted.%n", modelId);
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void listCustomModels() {
formTrainingAsyncClient.listCustomModels()
.subscribe(customModel ->
System.out.printf("Model Id: %s, Model status: %s, Created on: %s, Last updated on: %s.%n",
customModel.getModelId(),
customModel.getStatus(),
customModel.getTrainingStartedOn(),
customModel.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void beginCopy() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
String copyModelId = "copy-model-Id";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.flatMapMany(copyAuthorization -> formTrainingAsyncClient.beginCopyModel(copyModelId, copyAuthorization))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModelInfo ->
System.out.printf("Copied model has model Id: %s, model status: %s, training started on: %s,"
+ " training completed on: %s.%n",
customFormModelInfo.getModelId(),
customFormModelInfo.getStatus(),
customFormModelInfo.getTrainingStartedOn(),
customFormModelInfo.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void beginCopyOverload() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
String copyModelId = "copy-model-Id";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.flatMapMany(copyAuthorization -> formTrainingAsyncClient.beginCopyModel(copyModelId, copyAuthorization,
Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModelInfo ->
System.out.printf("Copied model has model Id: %s, model status: %s, training started on: %s,"
+ "training completed on: %s.%n",
customFormModelInfo.getModelId(),
customFormModelInfo.getStatus(),
customFormModelInfo.getTrainingStartedOn(),
customFormModelInfo.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCopyAuthorization() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.subscribe(copyAuthorization ->
System.out.printf("Copy Authorization for model id: %s, access token: %s, expiration time: %s, "
+ "target resource Id; %s, target resource region: %s%n",
copyAuthorization.getModelId(),
copyAuthorization.getAccessToken(),
copyAuthorization.getExpiresOn(),
copyAuthorization.getResourceId(),
copyAuthorization.getResourceRegion()
));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCopyAuthorizationWithResponse() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
formTrainingAsyncClient.getCopyAuthorizationWithResponse(resourceId, resourceRegion)
.subscribe(copyAuthorization ->
System.out.printf("Copy Authorization response status: %s, for model id: %s, access token: %s, "
+ "expiration time: %s, target resource Id; %s, target resource region: %s%n",
copyAuthorization.getStatusCode(),
copyAuthorization.getValue().getModelId(),
copyAuthorization.getValue().getAccessToken(),
copyAuthorization.getValue().getExpiresOn(),
copyAuthorization.getValue().getResourceId(),
copyAuthorization.getValue().getResourceRegion()
));
}
} |
We didn't want two nested subscribes, so the others were changed accordingly. | public void beginTraining() {
String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}";
boolean useTrainingLabels = true;
formTrainingAsyncClient.beginTraining(trainingFilesUrl, useTrainingLabels)
.subscribe(trainingPollingOperation -> {
trainingPollingOperation.getFinalResult().subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
});
} | .subscribe(trainingPollingOperation -> { | public void beginTraining() {
String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}";
boolean useTrainingLabels = true;
formTrainingAsyncClient.beginTraining(trainingFilesUrl, useTrainingLabels)
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
} | class FormTrainingAsyncClientJavaDocCodeSnippets {
private FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder().buildAsyncClient();
/**
* Code snippet for {@link FormTrainingAsyncClient} initialization
*/
public void formTrainingAsyncClientInInitialization() {
FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder().buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormTrainingAsyncClient} with pipeline
*/
public void createFormTrainingAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
/**
* Code snippet for {@link FormTrainingAsyncClient
* with options
*/
public void beginTrainingWithOptions() {
    // Snippet: start training a custom model with a file filter and a custom poll interval.
    String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}";
    TrainingFileFilter trainingFileFilter = new TrainingFileFilter().setIncludeSubFolders(true).setPrefix("Invoice");
    // Chain the poller's final result with flatMap instead of nesting a second
    // subscribe inside the first; this keeps a single reactive pipeline, matching
    // the style used by the other begin* snippets in this class.
    formTrainingAsyncClient.beginTraining(trainingFilesUrl, true, trainingFileFilter,
        Duration.ofSeconds(5))
        .flatMap(AsyncPollResponse::getFinalResult)
        .subscribe(customFormModel -> {
            System.out.printf("Model Id: %s%n", customFormModel.getModelId());
            System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
            customFormModel.getSubmodels().forEach(customFormSubmodel ->
                customFormSubmodel.getFields().forEach((key, customFormModelField) ->
                    System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
                        key, customFormModelField.getName(), customFormModelField.getAccuracy())));
        });
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCustomModel() {
String modelId = "{model_id}";
formTrainingAsyncClient.getCustomModel(modelId).subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCustomModelWithResponse() {
String modelId = "{model_id}";
formTrainingAsyncClient.getCustomModelWithResponse(modelId).subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
CustomFormModel customFormModel = response.getValue();
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getAccountProperties() {
formTrainingAsyncClient.getAccountProperties()
.subscribe(accountProperties -> {
System.out.printf("Max number of models that can be trained for this account: %d%n",
accountProperties.getCustomModelLimit());
System.out.printf("Current count of trained custom models: %d%n",
accountProperties.getCustomModelCount());
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getAccountPropertiesWithResponse() {
formTrainingAsyncClient.getAccountPropertiesWithResponse()
.subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
AccountProperties accountProperties = response.getValue();
System.out.printf("Max number of models that can be trained for this account: %d%n",
accountProperties.getCustomModelLimit());
System.out.printf("Current count of trained custom models: %d%n",
accountProperties.getCustomModelCount());
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void deleteModel() {
String modelId = "{model_id}";
formTrainingAsyncClient.deleteModel(modelId)
.subscribe(ignored -> System.out.printf("Model Id: %s is deleted%n", modelId));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void deleteModelWithResponse() {
String modelId = "{model_id}";
formTrainingAsyncClient.deleteModelWithResponse(modelId)
.subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
System.out.printf("Model Id: %s is deleted.%n", modelId);
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void listCustomModels() {
formTrainingAsyncClient.listCustomModels()
.subscribe(customModel ->
System.out.printf("Model Id: %s, Model status: %s, Created on: %s, Last updated on: %s.%n",
customModel.getModelId(),
customModel.getStatus(),
customModel.getTrainingStartedOn(),
customModel.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void beginCopy() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
String copyModelId = "copy-model-Id";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.flatMapMany(copyAuthorization -> formTrainingAsyncClient.beginCopyModel(copyModelId, copyAuthorization))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModelInfo ->
System.out.printf("Copied model has model Id: %s, model status: %s, training started on: %s,"
+ " training completed on: %s.%n",
customFormModelInfo.getModelId(),
customFormModelInfo.getStatus(),
customFormModelInfo.getTrainingStartedOn(),
customFormModelInfo.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void beginCopyOverload() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
String copyModelId = "copy-model-Id";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.flatMapMany(copyAuthorization -> formTrainingAsyncClient.beginCopyModel(copyModelId, copyAuthorization,
Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModelInfo ->
System.out.printf("Copied model has model Id: %s, model status: %s, training started on: %s,"
+ "training completed on: %s.%n",
customFormModelInfo.getModelId(),
customFormModelInfo.getStatus(),
customFormModelInfo.getTrainingStartedOn(),
customFormModelInfo.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCopyAuthorization() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.subscribe(copyAuthorization ->
System.out.printf("Copy Authorization for model id: %s, access token: %s, expiration time: %s, "
+ "target resource Id; %s, target resource region: %s%n",
copyAuthorization.getModelId(),
copyAuthorization.getAccessToken(),
copyAuthorization.getExpiresOn(),
copyAuthorization.getResourceId(),
copyAuthorization.getResourceRegion()
));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCopyAuthorizationWithResponse() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
formTrainingAsyncClient.getCopyAuthorizationWithResponse(resourceId, resourceRegion)
.subscribe(copyAuthorization ->
System.out.printf("Copy Authorization response status: %s, for model id: %s, access token: %s, "
+ "expiration time: %s, target resource Id; %s, target resource region: %s%n",
copyAuthorization.getStatusCode(),
copyAuthorization.getValue().getModelId(),
copyAuthorization.getValue().getAccessToken(),
copyAuthorization.getValue().getExpiresOn(),
copyAuthorization.getValue().getResourceId(),
copyAuthorization.getValue().getResourceRegion()
));
}
} | class FormTrainingAsyncClientJavaDocCodeSnippets {
private FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder().buildAsyncClient();
/**
* Code snippet for {@link FormTrainingAsyncClient} initialization
*/
public void formTrainingAsyncClientInInitialization() {
FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder().buildAsyncClient();
}
/**
* Code snippet for creating a {@link FormTrainingAsyncClient} with pipeline
*/
public void createFormTrainingAsyncClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
FormTrainingAsyncClient formTrainingAsyncClient = new FormTrainingClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildAsyncClient();
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
/**
* Code snippet for {@link FormTrainingAsyncClient
* with options
*/
public void beginTrainingWithOptions() {
String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}";
TrainingFileFilter trainingFileFilter = new TrainingFileFilter().setIncludeSubFolders(true).setPrefix("Invoice");
formTrainingAsyncClient.beginTraining(trainingFilesUrl, true, trainingFileFilter,
Duration.ofSeconds(5))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels().forEach(customFormSubmodel ->
customFormSubmodel.getFields().forEach((key, customFormModelField) ->
System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCustomModel() {
String modelId = "{model_id}";
formTrainingAsyncClient.getCustomModel(modelId).subscribe(customFormModel -> {
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCustomModelWithResponse() {
String modelId = "{model_id}";
formTrainingAsyncClient.getCustomModelWithResponse(modelId).subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
CustomFormModel customFormModel = response.getValue();
System.out.printf("Model Id: %s%n", customFormModel.getModelId());
System.out.printf("Model Status: %s%n", customFormModel.getModelStatus());
customFormModel.getSubmodels()
.forEach(customFormSubmodel -> customFormSubmodel.getFields()
.forEach((key, customFormModelField) ->
System.out.printf("Form Type: %s Field Text: %s Field Accuracy: %f%n",
key, customFormModelField.getName(), customFormModelField.getAccuracy())));
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getAccountProperties() {
formTrainingAsyncClient.getAccountProperties()
.subscribe(accountProperties -> {
System.out.printf("Max number of models that can be trained for this account: %d%n",
accountProperties.getCustomModelLimit());
System.out.printf("Current count of trained custom models: %d%n",
accountProperties.getCustomModelCount());
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getAccountPropertiesWithResponse() {
formTrainingAsyncClient.getAccountPropertiesWithResponse()
.subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
AccountProperties accountProperties = response.getValue();
System.out.printf("Max number of models that can be trained for this account: %d%n",
accountProperties.getCustomModelLimit());
System.out.printf("Current count of trained custom models: %d%n",
accountProperties.getCustomModelCount());
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void deleteModel() {
String modelId = "{model_id}";
formTrainingAsyncClient.deleteModel(modelId)
.subscribe(ignored -> System.out.printf("Model Id: %s is deleted%n", modelId));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void deleteModelWithResponse() {
String modelId = "{model_id}";
formTrainingAsyncClient.deleteModelWithResponse(modelId)
.subscribe(response -> {
System.out.printf("Response Status Code: %d.", response.getStatusCode());
System.out.printf("Model Id: %s is deleted.%n", modelId);
});
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void listCustomModels() {
formTrainingAsyncClient.listCustomModels()
.subscribe(customModel ->
System.out.printf("Model Id: %s, Model status: %s, Created on: %s, Last updated on: %s.%n",
customModel.getModelId(),
customModel.getStatus(),
customModel.getTrainingStartedOn(),
customModel.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void beginCopy() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
String copyModelId = "copy-model-Id";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.flatMapMany(copyAuthorization -> formTrainingAsyncClient.beginCopyModel(copyModelId, copyAuthorization))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModelInfo ->
System.out.printf("Copied model has model Id: %s, model status: %s, training started on: %s,"
+ " training completed on: %s.%n",
customFormModelInfo.getModelId(),
customFormModelInfo.getStatus(),
customFormModelInfo.getTrainingStartedOn(),
customFormModelInfo.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void beginCopyOverload() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
String copyModelId = "copy-model-Id";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.flatMapMany(copyAuthorization -> formTrainingAsyncClient.beginCopyModel(copyModelId, copyAuthorization,
Duration.ofSeconds(5)))
.flatMap(AsyncPollResponse::getFinalResult)
.subscribe(customFormModelInfo ->
System.out.printf("Copied model has model Id: %s, model status: %s, training started on: %s,"
+ "training completed on: %s.%n",
customFormModelInfo.getModelId(),
customFormModelInfo.getStatus(),
customFormModelInfo.getTrainingStartedOn(),
customFormModelInfo.getTrainingCompletedOn()));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCopyAuthorization() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
formTrainingAsyncClient.getCopyAuthorization(resourceId, resourceRegion)
.subscribe(copyAuthorization ->
System.out.printf("Copy Authorization for model id: %s, access token: %s, expiration time: %s, "
+ "target resource Id; %s, target resource region: %s%n",
copyAuthorization.getModelId(),
copyAuthorization.getAccessToken(),
copyAuthorization.getExpiresOn(),
copyAuthorization.getResourceId(),
copyAuthorization.getResourceRegion()
));
}
/**
* Code snippet for {@link FormTrainingAsyncClient
*/
public void getCopyAuthorizationWithResponse() {
String resourceId = "target-resource-Id";
String resourceRegion = "target-resource-region";
formTrainingAsyncClient.getCopyAuthorizationWithResponse(resourceId, resourceRegion)
.subscribe(copyAuthorization ->
System.out.printf("Copy Authorization response status: %s, for model id: %s, access token: %s, "
+ "expiration time: %s, target resource Id; %s, target resource region: %s%n",
copyAuthorization.getStatusCode(),
copyAuthorization.getValue().getModelId(),
copyAuthorization.getValue().getAccessToken(),
copyAuthorization.getValue().getExpiresOn(),
copyAuthorization.getValue().getResourceId(),
copyAuthorization.getValue().getResourceRegion()
));
}
} |
what is the purpose of using try() but without catch()? | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Invoice_6.pdf");
byte[] fileContent = Files.readAllBytes(sourceFile.toPath());
PollerFlux<OperationResult, List<RecognizedForm>> recognizeFormPoller;
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
String modelId = "{modelId}";
recognizeFormPoller = client.beginRecognizeCustomForms(toFluxByteBuffer(targetStream),
sourceFile.length(), modelId);
}
Mono<List<RecognizedForm>> recognizeFormResult = recognizeFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm form = recognizedForms.get(i);
System.out.printf("----------- Recognized Form page %d -----------%n", i);
System.out.printf("Form type: %s%n", form.getFormType());
form.getFields().forEach((label, formField) -> {
System.out.printf("Field %s has value %s with confidence score of %.2f.%n", label,
formField.getValueData().getText(),
formField.getConfidence());
});
System.out.print("-----------------------------------");
}
});
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | String modelId = "{modelId}"; | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Invoice_6.pdf");
byte[] fileContent = Files.readAllBytes(sourceFile.toPath());
String modelId = "{modelId}";
PollerFlux<OperationResult, List<RecognizedForm>> recognizeFormPoller;
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
recognizeFormPoller = client.beginRecognizeCustomForms(toFluxByteBuffer(targetStream), sourceFile.length(),
modelId);
}
Mono<List<RecognizedForm>> recognizeFormResult = recognizeFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm form = recognizedForms.get(i);
System.out.printf("----------- Recognized custom form info for page %d -----------%n", i);
System.out.printf("Form type: %s%n", form.getFormType());
form.getFields().forEach((label, formField) -> {
System.out.printf("Field '%s' has label '%s' with confidence score of %.2f.%n", label,
formField.getLabelData().getText(),
formField.getConfidence());
});
}
});
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class RecognizeCustomFormsAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class RecognizeCustomFormsAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
NIT: merge to one sentence: ``` new RecognizeOption().setIncludeFieldElements(true)); ``` | public static void main(String[] args) {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
String modelId = "{model_Id}";
String formUrl = "{form_url}";
PollerFlux<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(true));
Mono<List<RecognizedForm>> recognizeFormResult = recognizeFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
System.out.println("-------- RECOGNIZING FORM --------");
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %d has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) ->
System.out.printf("Field %s has value %s based on %s with a confidence score "
+ "of %.2f.%n", fieldText, fieldValue.getValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %s of Form -------%n", i1);
System.out.printf("Has width %f , angle %f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
final StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point ->
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY())));
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr,
formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
});
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | .setIncludeFieldElements(true)); | public static void main(String[] args) {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
String modelId = "{model_Id}";
String formUrl = "{form_url}";
PollerFlux<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomFormsFromUrl(formUrl, modelId,
new RecognizeOptions()
.setIncludeFieldElements(true));
Mono<List<RecognizedForm>> recognizeFormResult = recognizeFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %d has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) ->
System.out.printf("Field %s has value %s based on %s with a confidence score "
+ "of %.2f.%n", fieldText, fieldValue.getValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing info on page %s of Form -------%n", i1);
System.out.printf("Has width: %f , angle: %f, height: %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().stream()
.filter(formContent -> formContent instanceof FormWord)
.map(formContent -> (FormWord) (formContent))
.forEach(formWordElement -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point ->
boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY())));
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr,
formWordElement.getConfidence());
});
});
System.out.println();
}
}
}
});
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class GetBoundingBoxesAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | class GetBoundingBoxesAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} |
checkstyle issue? line too long? | public static void main(String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String formUrl = "{form_url}";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomFormsFromUrl(formUrl, modelId, new RecognizeOptions()
.setIncludeFieldElements(true));
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
System.out.println("-------- RECOGNIZING FORM --------");
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %d has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %d of Form -------%n", i1);
System.out.printf("Has width %f , angle %.2f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY())));
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr, formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
} | formWordElement.getBoundingBox().getPoints().forEach(point -> boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(), | public static void main(String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String formUrl = "{form_url}";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomFormsFromUrl(formUrl, modelId, new RecognizeOptions()
.setIncludeFieldElements(true));
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %d has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing info on page %s of Form -------%n", i1);
System.out.printf("Has width: %f, angle: %.2f, height: %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells()
.forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().stream()
.filter(formContent -> formContent instanceof FormWord)
.map(formContent -> (FormWord) (formContent))
.forEach(formWordElement -> {
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints()
.forEach(point -> boundingBoxStr.append(
String.format("[%.2f, %.2f]", point.getX(), point.getY())));
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr.toString(), formWordElement.getConfidence());
});
});
System.out.println();
}
}
}
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} |
it is in sample, should be fine | public static void main(String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String formUrl = "{form_url}";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomFormsFromUrl(formUrl, modelId, new RecognizeOptions()
.setIncludeFieldElements(true));
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
System.out.println("-------- RECOGNIZING FORM --------");
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %d has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing Page %d of Form -------%n", i1);
System.out.printf("Has width %f , angle %.2f, height %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().forEach(formContent -> {
if (formContent instanceof FormWord) {
FormWord formWordElement = (FormWord) (formContent);
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints().forEach(point -> boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(),
point.getY())));
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr, formWordElement.getConfidence());
}
});
});
System.out.println();
}
}
}
} | formWordElement.getBoundingBox().getPoints().forEach(point -> boundingBoxStr.append(String.format("[%.2f, %.2f]", point.getX(), | public static void main(String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String modelId = "{model_Id}";
String formUrl = "{form_url}";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeFormPoller =
client.beginRecognizeCustomFormsFromUrl(formUrl, modelId, new RecognizeOptions()
.setIncludeFieldElements(true));
List<RecognizedForm> recognizedForms = recognizeFormPoller.getFinalResult();
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm recognizedForm = recognizedForms.get(i);
System.out.printf("Form %d has type: %s%n", i, recognizedForm.getFormType());
recognizedForm.getFields().forEach((fieldText, fieldValue) -> System.out.printf("Field %s has value %s "
+ "based on %s with a confidence score "
+ "of %.2f.%n",
fieldText, fieldValue.getValue(), fieldValue.getValueData().getText(),
fieldValue.getConfidence()));
final List<FormPage> pages = recognizedForm.getPages();
for (int i1 = 0; i1 < pages.size(); i1++) {
final FormPage formPage = pages.get(i1);
System.out.printf("-------Recognizing info on page %s of Form -------%n", i1);
System.out.printf("Has width: %f, angle: %.2f, height: %f %n", formPage.getWidth(),
formPage.getTextAngle(), formPage.getHeight());
System.out.println("Recognized Tables: ");
final List<FormTable> tables = formPage.getTables();
for (int i2 = 0; i2 < tables.size(); i2++) {
final FormTable formTable = tables.get(i2);
System.out.printf("Table %d%n", i2);
formTable.getCells()
.forEach(formTableCell -> {
System.out.printf("Cell text %s has following words: %n", formTableCell.getText());
formTableCell.getFieldElements().stream()
.filter(formContent -> formContent instanceof FormWord)
.map(formContent -> (FormWord) (formContent))
.forEach(formWordElement -> {
StringBuilder boundingBoxStr = new StringBuilder();
if (formWordElement.getBoundingBox() != null) {
formWordElement.getBoundingBox().getPoints()
.forEach(point -> boundingBoxStr.append(
String.format("[%.2f, %.2f]", point.getX(), point.getY())));
}
System.out.printf("Word '%s' within bounding box %s with a confidence of %.2f.%n",
formWordElement.getText(), boundingBoxStr.toString(), formWordElement.getConfidence());
});
});
System.out.println();
}
}
}
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} | class GetBoundingBoxes {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*/
} |
This is an auto closeable try block. This will make sure the stream is closed out of the block. | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Invoice_6.pdf");
byte[] fileContent = Files.readAllBytes(sourceFile.toPath());
PollerFlux<OperationResult, List<RecognizedForm>> recognizeFormPoller;
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
String modelId = "{modelId}";
recognizeFormPoller = client.beginRecognizeCustomForms(toFluxByteBuffer(targetStream),
sourceFile.length(), modelId);
}
Mono<List<RecognizedForm>> recognizeFormResult = recognizeFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm form = recognizedForms.get(i);
System.out.printf("----------- Recognized Form page %d -----------%n", i);
System.out.printf("Form type: %s%n", form.getFormType());
form.getFields().forEach((label, formField) -> {
System.out.printf("Field %s has value %s with confidence score of %.2f.%n", label,
formField.getValueData().getText(),
formField.getConfidence());
});
System.out.print("-----------------------------------");
}
});
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | String modelId = "{modelId}"; | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Invoice_6.pdf");
byte[] fileContent = Files.readAllBytes(sourceFile.toPath());
String modelId = "{modelId}";
PollerFlux<OperationResult, List<RecognizedForm>> recognizeFormPoller;
try (InputStream targetStream = new ByteArrayInputStream(fileContent)) {
recognizeFormPoller = client.beginRecognizeCustomForms(toFluxByteBuffer(targetStream), sourceFile.length(),
modelId);
}
Mono<List<RecognizedForm>> recognizeFormResult = recognizeFormPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
recognizeFormResult.subscribe(recognizedForms -> {
for (int i = 0; i < recognizedForms.size(); i++) {
final RecognizedForm form = recognizedForms.get(i);
System.out.printf("----------- Recognized custom form info for page %d -----------%n", i);
System.out.printf("Form type: %s%n", form.getFormType());
form.getFields().forEach((label, formField) -> {
System.out.printf("Field '%s' has label '%s' with confidence score of %.2f.%n", label,
formField.getLabelData().getText(),
formField.getConfidence());
});
}
});
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class RecognizeCustomFormsAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class RecognizeCustomFormsAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
NIT: assign local variable for feedResponse.getResults() to avoid repetitive invocation of the property getter | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) { | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
// Not validated: start() re-resolves this value from the service in
// initializeCollectionPropertiesForBuild(), so a stale/null value is overwritten.
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
// Lease bookkeeping reads documents back after writes, so content responses must be enabled.
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
// Lease management presumably relies on read-your-writes (SESSION) semantics — weaker levels
// only trigger a warning, not a failure. TODO confirm against the lease-store contract.
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
// Fixed garbled log message ("setting are less then expected").
logger.warn("leaseClient consistency level is lower than the expected level SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
// Optional: when not supplied, buildPartitionManager() defaults to EqualPartitionsBalancingStrategy.
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
// Optional: when not supplied, buildPartitionManager() defaults to PartitionProcessorFactoryImpl.
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
// Optional: when not supplied, getLeaseStoreManager() builds one from the lease container.
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
// Optional: when not supplied, buildPartitionManager() defaults to TraceHealthMonitor.
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
// Host name and observer are the only hard requirements; everything else has a default.
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
// A very small acquire interval is legal but likely a misconfiguration, so only warn.
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
// The builder itself implements ChangeFeedProcessor; no separate instance is created.
return this;
}
public ChangeFeedProcessorBuilderImpl() {
// NOTE(review): these assignments appear to duplicate the field initializers —
// harmless but redundant; confirm before removing.
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
// Injection hook: a pre-built partition manager makes start() skip the build pipeline.
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
// Lazily default the options so start() can run without an explicit options() call.
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
// Resolve database and container identifiers from the service; these feed getLeasePrefix().
// NOTE(review): getProperties().getId() is the user-defined id, not the backend "_rid" —
// confirm the lease prefix is intended to be built from user-facing ids.
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
// The lease store requires the lease container to be partitioned on exactly "/id".
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so later calls (and getEstimatedLag/getCurrentState) reuse the same manager.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/*
 * Builds the prefix that scopes lease documents to this monitored feed:
 * "<optionsPrefix><serviceHost>_<databaseResourceId>_<collectionResourceId>".
 */
private String getLeasePrefix() {
String configuredPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
String effectivePrefix = (configuredPrefix != null) ? configuredPrefix : "";
URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
return effectivePrefix
+ serviceEndpoint.getHost()
+ '_' + this.databaseResourceId
+ '_' + this.collectionResourceId;
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
// Wires the processing pipeline: observer factory -> supervisor -> controller -> load balancer.
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// Fall back to the default processor factory when the caller did not supply one.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the health-monitoring decorator (partitionController2) is handed to the load
// balancer, but PartitionManagerImpl receives the undecorated partitionController — confirm
// whether the manager should also route through the decorated controller.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
// Fire-and-forget shutdown on a worker thread; does not block the caller.
// NOTE(review): stop() throws IllegalStateException synchronously when the processor was
// never fully started, so close() can propagate that exception — confirm intended.
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
Not sure - my style-preference would be changeFeedProcessorState .setEstimatedLag(0) .setContinuationToken(latestLsn); But feel free to ignore if the non-fluent style is what makes the code more consistent etc. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | changeFeedProcessorState.setContinuationToken(latestLsn); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
// Bootstrapper timing: how long to sleep between retries and how long a lock is held.
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
// Unique name for this host; required by build().
private String hostName;
// Client wrapping the monitored (feed) container.
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
// Resolved from the service at start(); volatile because they are written from reactive chains.
private volatile String databaseResourceId;
private volatile String collectionResourceId;
// Client wrapping the lease container.
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
// Set once start() has built (or a constructor injected) the manager.
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
// First start: resolve ids, obtain/build the lease store manager, build the partition
// manager, then start it.
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
// Already built (or injected via the PartitionManager constructor): just (re)start it.
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
// NOTE(review): this throws synchronously instead of returning Mono.error(...), so
// reactively-composed callers see the exception at assembly time — confirm intended.
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
// Started means a partition manager exists and reports itself as running.
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 * @throws IllegalStateException if the processor was never fully started.
 */
@Override
public Mono<Void> stop() {
    boolean fullyStarted = this.partitionManager != null && this.partitionManager.isRunning();
    if (!fullyStarted) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return this.partitionManager.stop();
}
/**
 * Reports whether this change feed processor is currently active.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    PartitionManager manager = this.partitionManager;
    return manager != null && manager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
    List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        // Processor not started yet: report an empty (immutable) state list.
        return Mono.just(Collections.unmodifiableList(earlyResult));
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // Read at most one pending change per lease, resuming from its continuation token.
            ChangeFeedOptions options = new ChangeFeedOptions()
                .setMaxItemCount(1)
                .setPartitionKeyRangeId(lease.getLeaseToken())
                .setStartFromBeginning(true)
                .setRequestContinuation(lease.getContinuationToken());
            return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
                .take(1)
                .map(feedResponse -> {
                    // Session token shape (assumed): "<pkRangeId>:<version>#<globalLsn>";
                    // the latest LSN is the segment after SEGMENT_SEPARATOR when present.
                    String sessionTokenLsn = feedResponse.getSessionToken();
                    String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
                    String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
                    String latestLsn = segments[0];
                    if (segments.length >= 2) {
                        latestLsn = segments[1];
                    }
                    ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                        .setHostName(lease.getOwner())
                        .setLeaseToken(lease.getLeaseToken());
                    if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                        // Nothing pending: caught up to the latest LSN with zero lag.
                        changeFeedProcessorState.setEstimatedLag(0)
                            .setContinuationToken(latestLsn);
                        return changeFeedProcessorState;
                    }
                    changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
                    Integer currentLsn = 0;
                    Integer estimatedLag = 0;
                    try {
                        currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
                        estimatedLag = Integer.valueOf(latestLsn);
                        // +1 because the first unread document itself counts toward the lag.
                        estimatedLag = estimatedLag - currentLsn + 1;
                        changeFeedProcessorState.setEstimatedLag(estimatedLag);
                    } catch (NumberFormatException ex) {
                        // Non-numeric LSN from the backend: flag with -1 instead of failing the list.
                        logger.warn("Unexpected Cosmos LSN found", ex);
                        changeFeedProcessorState.setEstimatedLag(-1);
                    }
                    return changeFeedProcessorState;
                });
        })
        .collectList()
        .map(Collections::unmodifiableList);
}
/**
 * Sets the name identifying this host instance.
 * <p>
 * When several hosts share the same lease collection, each must use a unique name.
 *
 * @param hostName the name to be used for the host.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
    this.hostName = hostName;
    return this;
}
/**
 * Sets the {@link CosmosAsyncContainer} whose change feed will be monitored.
 *
 * @param feedDocumentClient the container to read changes from; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (null == feedDocumentClient) {
        throw new IllegalArgumentException("feedContextClient");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} controlling the processor's behavior.
 *
 * @param changeFeedProcessorOptions the options to use; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (null == changeFeedProcessorOptions) {
        throw new IllegalArgumentException("changeFeedProcessorOptions");
    }
    this.changeFeedProcessorOptions = changeFeedProcessorOptions;
    return this;
}
/**
 * Sets the factory used to create {@link ChangeFeedObserver} instances.
 *
 * @param observerFactory the {@link ChangeFeedObserverFactory} to use; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (null == observerFactory) {
        throw new IllegalArgumentException("observerFactory");
    }
    this.observerFactory = observerFactory;
    return this;
}
/**
 * Registers an observer type; a default factory will instantiate it per partition.
 *
 * @param type the {@link ChangeFeedObserver} subtype to instantiate; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (null == type) {
        throw new IllegalArgumentException("type");
    }
    this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
    return this;
}
/**
 * Registers a consumer that is invoked with each batch of changed documents.
 *
 * @param consumer the callback receiving the changed documents.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    this.databaseResourceId = databaseResourceId;
    return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets the {@link CosmosAsyncContainer} backing the lease store.
 * <p>
 * The client must have content-response-on-write enabled (lease bookkeeping relies on
 * the returned documents), and a consistency level of at least SESSION is expected;
 * weaker levels are tolerated but logged as a warning.
 *
 * @param leaseClient the lease container client; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (null == leaseClient) {
        throw new IllegalArgumentException("leaseClient");
    }
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    boolean weakerThanSession =
        consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL;
    if (weakerThanSession) {
        logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the strategy deciding how partitions are distributed across hosts.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to use; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (null == loadBalancingStrategy) {
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}
/**
 * Sets the factory creating the {@link PartitionProcessor} used for each partition.
 *
 * @param partitionProcessorFactory the {@link PartitionProcessorFactory} to use; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (null == partitionProcessorFactory) {
        throw new IllegalArgumentException("partitionProcessorFactory");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}
/**
 * Sets the {@link LeaseStoreManager} responsible for lease bookkeeping.
 *
 * @param leaseStoreManager the manager to use; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (null == leaseStoreManager) {
        throw new IllegalArgumentException("leaseStoreManager");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}
/**
 * Sets the {@link HealthMonitor} that observes unhealthy partition-controller activity.
 *
 * @param healthMonitor the monitor to use; must not be null.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (null == healthMonitor) {
        throw new IllegalArgumentException("healthMonitor");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Validates the configuration and returns this instance as a {@link ChangeFeedProcessor}.
 * <p>
 * Requires a host name and an observer (factory); warns when the configured lease
 * acquire interval is below the recommended default, and falls back to an elastic
 * scheduler when none was supplied.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 */
public ChangeFeedProcessor build() {
    if (null == this.hostName) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (null == this.observerFactory) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    ChangeFeedProcessorOptions configuredOptions = this.changeFeedProcessorOptions;
    if (configuredOptions != null
        && configuredOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (null == this.scheduler) {
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
// Default constructor: starts from the built-in partition query batch size and
// degree of parallelism; everything else is supplied through the builder methods.
public ChangeFeedProcessorBuilderImpl() {
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25;
}
// Test/advanced constructor: wraps an already-built PartitionManager, so start()
// skips collection/lease initialization entirely.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
// Resolves the monitored database and container identifiers needed to build the
// lease prefix, defaulting the processor options if none were provided.
// NOTE(review): both fields are populated from getProperties().getId(), i.e. the
// user-facing id, while the fields are named *ResourceId — confirm whether the
// backend resource id (_rid) was intended here.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map( databaseResourceResponse -> {
            this.databaseResourceId = databaseResourceResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        .flatMap( id -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(documentCollectionResourceResponse -> {
                this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
                return this;
            }));
}
// Returns the configured LeaseStoreManager, lazily building one from the lease
// container when none was supplied. The lease container MUST be partitioned on
// "/id" (each lease document is its own partition), otherwise an error is emitted.
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                        collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                        collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                // Reject anything other than a single "/id" partition key path.
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                // The prefix scopes this processor's lease documents within the shared container.
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so subsequent start() calls reuse the same manager.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the prefix that scopes this processor's lease documents:
 * "{optionsPrefix}{serviceHost}_{databaseResourceId}_{collectionResourceId}".
 */
private String getLeasePrefix() {
    String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (optionsPrefix == null) {
        optionsPrefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return optionsPrefix
        + serviceEndpoint.getHost()
        + "_" + this.databaseResourceId
        + "_" + this.collectionResourceId;
}
// Wires together all partition-management collaborators: synchronizer, bootstrapper,
// supervisor factory, controller (wrapped in a health-monitoring decorator), load
// balancer, and finally the PartitionManager itself.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // A user-supplied processor factory wins over the default implementation.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    if (this.loadBalancingStrategy == null) {
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    // NOTE(review): the PartitionManager is built with the UNDECORATED controller while
    // the load balancer gets the health-monitored partitionController2 — confirm whether
    // the decorated controller was intended here as well.
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
/**
 * Closes the processor, stopping it asynchronously on a background scheduler.
 * <p>
 * Fixed: previously this called {@link #stop()} unconditionally; stop() throws
 * {@link IllegalStateException} when the processor was never fully started, which
 * made close() unsafe to call (e.g. in try-with-resources) on an unstarted instance.
 */
@Override
public void close() {
    if (this.isStarted()) {
        // Fire-and-forget shutdown; errors are handled inside the stop pipeline.
        this.stop().subscribeOn(Schedulers.elastic()).subscribe();
    }
}
} |
Wouldn't this indicate a critical failure? Tracing it only as a warning and ignoring the unexpected backend response seems to make breaking changes unnecessarily hard to identify and debug. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | } catch (NumberFormatException ex) { | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
/**
 * Default constructor. The assignments restate the field initializers; kept for clarity.
 */
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
/**
 * Constructor taking a pre-built {@link PartitionManager}; start()/stop() then delegate
 * to it directly instead of assembling one from the configured containers.
 *
 * @param partitionManager the pre-built partition manager to use.
 */
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
/**
 * Resolves and caches the database and collection resource ids of the monitored feed,
 * defaulting the processor options when none were supplied.
 * Completes with this builder (typed as {@link ChangeFeedProcessor}).
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
// Read the database first (caches databaseResourceId), then the container
// (caches collectionResourceId); both ids feed into the lease prefix.
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
/**
 * Returns the configured {@link LeaseStoreManager}, or lazily builds one from the lease
 * container. The lease container must be partitioned on "/id"; anything else is rejected.
 * The built manager is cached on this builder for subsequent calls.
 */
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
// Exactly one partition key path, and it must be "/id".
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so later calls (and getEstimatedLag/getCurrentState) reuse it.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/**
 * Computes the prefix shared by all lease documents of this processor instance.
 * Combines the (optional) user-configured prefix with the service host and the
 * database/collection resource ids so leases for different feeds never collide.
 *
 * @return the lease document prefix.
 */
private String getLeasePrefix() {
    String configuredPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return String.format(
        "%s%s_%s_%s",
        configuredPrefix != null ? configuredPrefix : "",
        serviceEndpoint.getHost(),
        this.databaseResourceId,
        this.collectionResourceId);
}
/**
 * Wires together all runtime components (synchronizer, bootstrapper, supervisor factory,
 * controller, load balancer) into a {@link PartitionManager}.
 */
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
// Wrap the user observer factory so checkpoints are taken at the configured frequency.
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// User-supplied processor factory wins; otherwise use the default implementation.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the load balancer gets the health-monitored controller (partitionController2)
// but the PartitionManagerImpl is handed the undecorated partitionController — confirm this
// asymmetry is intentional.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
/**
 * Closes the processor by asynchronously stopping it on an elastic scheduler.
 * NOTE(review): stop() throws IllegalStateException synchronously when the processor has not
 * fully started, so close() on an unstarted processor will throw — confirm this is intended.
 */
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    // A non-null partition manager means the processor was already assembled once.
    if (this.partitionManager != null) {
        return this.partitionManager.start();
    }
    // First start: resolve database/collection ids, obtain (or build) the lease store
    // manager, assemble the partition manager, cache it, then start it.
    return this.initializeCollectionPropertiesForBuild()
        .flatMap(value -> this.getLeaseStoreManager().flatMap(this::buildPartitionManager))
        .flatMap(builtManager -> {
            this.partitionManager = builtManager;
            return this.partitionManager.start();
        });
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 * @throws IllegalStateException if the processor has not fully started.
 */
@Override
public Mono<Void> stop() {
    // Snapshot the volatile field once so the check and the call see the same instance.
    PartitionManager manager = this.partitionManager;
    if (manager == null || !manager.isRunning()) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return manager.stop();
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    // Snapshot the volatile field once to avoid a null-check/call race.
    PartitionManager manager = this.partitionManager;
    return manager != null && manager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return an unmodifiable list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
// One-item query per lease, resuming from its continuation, to learn the latest LSN.
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
// Session token format: "<pkRangeId>:<segment>#<segment>"; last segment = latest LSN.
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
// Nothing processed yet: zero lag, latest LSN as the continuation.
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
// NOTE(review): results.get(0).get(PROPERTY_NAME_LSN) may be null for documents
// lacking "_lsn", which would NPE on asText — confirm the feed always carries it.
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
// +1 because the item at currentLsn itself has not been processed yet.
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
// No null check here (unlike the other setters); build() rejects a null host name later.
this.hostName = hostName;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient == null) {
        // Fixed: the message previously named "feedContextClient", which is the internal
        // field, not the parameter the caller actually passed.
        throw new IllegalArgumentException("feedDocumentClient");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the change feed processor options to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (null == changeFeedProcessorOptions) {
        // Fail fast rather than deferring a NullPointerException to start().
        throw new IllegalArgumentException("changeFeedProcessorOptions");
    }
    this.changeFeedProcessorOptions = changeFeedProcessorOptions;
    return this;
}
/**
 * Sets the {@link ChangeFeedObserverFactory} used to generate a {@link ChangeFeedObserver}
 * per partition.
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (null == observerFactory) {
        // Reject null eagerly; build() only verifies that some factory was configured.
        throw new IllegalArgumentException("observerFactory");
    }
    this.observerFactory = observerFactory;
    return this;
}
/**
 * Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
 *
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (null == type) {
        throw new IllegalArgumentException("type");
    }
    // Wrap the observer class in a reflective factory that instantiates it per partition.
    this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
    return this;
}
/**
 * Convenience overload: registers a consumer that receives each batch of changed documents,
 * wrapped in a {@link DefaultObserverFactory}.
 *
 * @param consumer the callback invoked with each batch of changes.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
// Normally resolved automatically in initializeCollectionPropertiesForBuild().
this.databaseResourceId = databaseResourceId;
return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
// Normally resolved automatically in initializeCollectionPropertiesForBuild().
this.collectionResourceId = collectionResourceId;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if leaseClient is null or has content response on write disabled.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease documents are read back after writes, so content responses must be enabled.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Weaker-than-SESSION consistency risks stale lease reads; warn but allow.
        // (Message grammar fixed: was "setting are less then expected".)
        logger.warn("leaseClient consistency level setting is less than expected which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} used for partition load balancing;
 * when not supplied, an {@link EqualPartitionsBalancingStrategy} is created at build time.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (null == loadBalancingStrategy) {
        // Reject null eagerly, matching the other with* setters.
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
// When unset, buildPartitionManager() falls back to PartitionProcessorFactoryImpl.
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
// When unset, getLeaseStoreManager() builds one lazily from the lease container.
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
// When unset, buildPartitionManager() defaults to a TraceHealthMonitor.
this.healthMonitor = healthMonitor;
return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 */
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
// Warn about an aggressive lease-acquire interval; options may still be null here,
// they are defaulted later in initializeCollectionPropertiesForBuild().
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
/**
 * Default constructor. The assignments restate the field initializers; kept for clarity.
 */
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
/**
 * Constructor taking a pre-built {@link PartitionManager}; start()/stop() then delegate
 * to it directly instead of assembling one from the configured containers.
 *
 * @param partitionManager the pre-built partition manager to use.
 */
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
/**
 * Resolves and caches the database and collection resource ids of the monitored feed,
 * defaulting the processor options when none were supplied.
 * Completes with this builder (typed as {@link ChangeFeedProcessor}).
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
// Read the database (caches databaseResourceId), then the container
// (caches collectionResourceId); both feed into the lease prefix.
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
/**
 * Returns the configured {@link LeaseStoreManager}, or lazily builds one from the lease
 * container. The lease container must be partitioned on "/id"; anything else is rejected.
 * The built manager is cached on this builder for subsequent calls.
 */
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
// Exactly one partition key path, and it must be "/id".
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so later calls (and getEstimatedLag/getCurrentState) reuse it.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/**
 * Computes the prefix shared by all lease documents of this processor instance:
 * optional configured prefix + service host + database and collection resource ids,
 * so leases for different feeds never collide.
 */
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
/**
 * Wires together all runtime components (synchronizer, bootstrapper, supervisor factory,
 * controller, load balancer) into a {@link PartitionManager}.
 */
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
// Wrap the user observer factory so checkpoints are taken at the configured frequency.
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// User-supplied processor factory wins; otherwise use the default implementation.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the load balancer receives the health-monitored controller
// (partitionController2) but PartitionManagerImpl receives the undecorated
// partitionController — confirm this asymmetry is intentional.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
/**
 * Closes the processor by asynchronously stopping it on an elastic scheduler.
 * NOTE(review): stop() throws IllegalStateException synchronously when the processor has not
 * fully started, so close() on an unstarted processor will throw — confirm this is intended.
 */
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
Again - feel free to ignore for consistency etc. - I am still struggling with Java not having a clear way to distinguish a ReadOnlyList in the contract. But from my intuition I would expect it to be clearly documented in the API doc comments if a method returns an unmodifiable list - so would it make sense to add a comment along these lines above? Also, wouldn't it make sense to also return an unmodifiable list in the short-cut (when the list is empty) above, to be consistent? | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
    // Wrap in unmodifiableList so both exit paths of this method return a read-only
    // list, consistent with the .map(Collections::unmodifiableList) on the main path.
    return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
    .flatMap(lease -> {
        // One-item query per lease, resuming from its continuation, to learn the latest LSN.
        ChangeFeedOptions options = new ChangeFeedOptions()
            .setMaxItemCount(1)
            .setPartitionKeyRangeId(lease.getLeaseToken())
            .setStartFromBeginning(true)
            .setRequestContinuation(lease.getContinuationToken());
        return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
            .take(1)
            .map(feedResponse -> {
                final String pkRangeIdSeparator = ":";
                // Session-token segment separator; this literal was truncated in this copy
                // and has been restored to "#" ("<pkRangeId>:<segment>#<segment>").
                final String segmentSeparator = "#";
                final String lsnPropertyName = "_lsn";
                final String tsPropertyName = "_ts";
                String sessionTokenLsn = feedResponse.getSessionToken();
                String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
                String[] segments = parsedSessionToken.split(segmentSeparator);
                String latestLsn = segments[0];
                if (segments.length >= 2) {
                    latestLsn = segments[1];
                }
                ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                    .setId(lease.getId())
                    .setHostName(lease.getOwner())
                    .setLeaseToken(lease.getLeaseToken())
                    .setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
                if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                    // Nothing processed yet: zero lag, latest LSN as the continuation.
                    changeFeedProcessorState.setEstimatedLag(0);
                    changeFeedProcessorState.setContinuationToken(latestLsn);
                    return changeFeedProcessorState;
                }
                changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
                try {
                    // NOTE(review): Cosmos "_ts" is documented as epoch *seconds*;
                    // ofEpochMilli here may be off by a factor of 1000 — confirm.
                    changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
                        feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
                } catch (NumberFormatException ex) {
                    logger.warn("Unexpected Cosmos _ts found", ex);
                    changeFeedProcessorState.setContinuationTokenTimestamp(null);
                }
                Integer currentLsn = 0;
                Integer estimatedLag = 0;
                try {
                    currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
                    estimatedLag = Integer.valueOf(latestLsn);
                    // +1 because the item at currentLsn itself has not been processed yet.
                    estimatedLag = estimatedLag - currentLsn + 1;
                    changeFeedProcessorState.setEstimatedLag(estimatedLag);
                } catch (NumberFormatException ex) {
                    logger.warn("Unexpected Cosmos LSN found", ex);
                    changeFeedProcessorState.setEstimatedLag(-1);
                }
                return changeFeedProcessorState;
            });
    })
    .collectList()
    .map(Collections::unmodifiableList);
}
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
// Cadence of the bootstrapper retry loop and the lease-store lock duration.
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
// Resolved lazily in initializeCollectionPropertiesForBuild(); read across threads.
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
// First start: resolve collection ids, obtain/build the lease store manager,
// assemble the partition manager, cache it, then start it.
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
// Already assembled (restart after stop, or pre-built via constructor).
return partitionManager.start();
}
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 * @throws IllegalStateException if the processor has not fully started.
 */
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
 * Returns the current owner (host) and an approximation of the difference between the last processed item (defined
 * by the state of the feed container) and the latest change in the container for each partition (lease
 * document).
 * <p>
 * An empty map will be returned if the processor was not started or no lease documents matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
 * lag, asynchronously.
 */
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
// NOTE(review): the per-lease lambda body is truncated in this copy ("options = );")
// and does not compile as-is; compare the complete variant of this method earlier
// in this file, which builds a one-item ChangeFeedOptions query per lease and maps
// each response to a Pair of (owner_leaseToken..., estimatedLag).
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
// No null check here (unlike the other setters); build() rejects a null host name later.
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
Though these have one assignment, it will look a bit odd when compared with similar patterns in the rest of the implementation (not just CFP). And since they are not consumed by any Reactor code as arguments down the execution path which requires as such, they don't really need to be final... | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | String sessionTokenLsn = feedResponse.getSessionToken(); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
I agree, this should be fluent style - if changeFeedProcessorState supports it. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | changeFeedProcessorState.setContinuationToken(latestLsn); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
/**
 * Convenience overload: registers a {@link DefaultObserverFactory} that forwards
 * each batch of changed documents to the given consumer.
 *
 * @param consumer callback invoked with each batch of changed documents.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    // Normally resolved in initializeCollectionPropertiesForBuild(); this setter
    // pre-seeds the value (presumably for tests/overrides - confirm with callers).
    this.databaseResourceId = databaseResourceId;
    return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    // Normally resolved in initializeCollectionPropertiesForBuild(); this setter
    // pre-seeds the value (presumably for tests/overrides - confirm with callers).
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseClient} is null or has content
 *     response on write disabled.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient cannot be null");
    }
    // Lease documents are read back after writes, so the client must return the
    // written content for lease bookkeeping to work.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Weaker-than-SESSION consistency can cause stale lease reads; warn but do
        // not fail. (Fixed grammar of the original message: "setting are less then".)
        logger.warn("leaseClient consistency level setting is less than expected which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy == null) {
        // Descriptive diagnostic instead of the bare parameter name.
        throw new IllegalArgumentException("loadBalancingStrategy cannot be null");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory == null) {
        // Descriptive diagnostic instead of the bare parameter name.
        throw new IllegalArgumentException("partitionProcessorFactory cannot be null");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager == null) {
        // Descriptive diagnostic instead of the bare parameter name.
        throw new IllegalArgumentException("leaseStoreManager cannot be null");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor == null) {
        // Descriptive diagnostic instead of the bare parameter name.
        throw new IllegalArgumentException("healthMonitor cannot be null");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if the host name or observer was not configured.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // A lease acquire interval below the default makes hosts compete for leases
    // more aggressively than intended; warn but honor the caller's setting.
    if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (this.scheduler == null) {
        this.scheduler = Schedulers.elastic();
    }
    // The builder itself implements ChangeFeedProcessor; the remaining wiring
    // (lease store, partition manager) is deferred to start().
    return this;
}
public ChangeFeedProcessorBuilderImpl() {
    // Re-assigns what appear to be the field declaration defaults
    // (presumably redundant - TODO confirm against the field initializers).
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25;
}
// Constructor that injects a pre-built PartitionManager; start() then skips the
// lease-store/partition-manager bootstrap path entirely.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
// Resolves the database and collection identifiers from the feed client and
// caches them on this builder; both feed into the lease prefix (getLeasePrefix()).
// Also lazily defaults changeFeedProcessorOptions.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map( databaseResourceResponse -> {
            this.databaseResourceId = databaseResourceResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        .flatMap( id -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(documentCollectionResourceResponse -> {
                // NOTE(review): getId() is the user-facing id, although the field is
                // named "...ResourceId" - confirm this is the intended value.
                this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
                return this;
            }));
}
// Returns the configured LeaseStoreManager, or lazily builds one against the
// lease container. The lease container must be partitioned on "/id"; the built
// manager is cached on this builder for subsequent calls.
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                // A lease container is "partitioned" only if it declares at least one
                // partition key path.
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                // Exactly one path, and it must be "/id".
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so later calls take the fast path above.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Computes the prefix shared by all lease documents owned by this processor:
 * the user-configured prefix (empty when unset) followed by the service host,
 * database id and collection id, separated by underscores.
 */
private String getLeasePrefix() {
    String configuredPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    String prefix = (configuredPrefix != null) ? configuredPrefix : "";
    String serviceHost = this.feedContextClient.getServiceEndpoint().getHost();
    return prefix + serviceHost + "_" + this.databaseResourceId + "_" + this.collectionResourceId;
}
// Wires the full processing pipeline: synchronizer -> bootstrapper ->
// supervisor factory -> partition controller -> load balancer -> manager.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    // Decorate the user observer factory with checkpointing.
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // User-supplied processor factory wins; otherwise use the default.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    if (this.loadBalancingStrategy == null) {
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    // NOTE(review): the manager receives the UNdecorated partitionController while
    // only the load balancer sees the health-monitoring decorator
    // (partitionController2) - confirm this asymmetry is intended.
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
@Override
public void close() {
    // Fire-and-forget shutdown on an elastic scheduler.
    // NOTE(review): stop() throws IllegalStateException synchronously when the
    // processor never fully started, so close() would propagate it - confirm intended.
    this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
_ts should always be a long - right ? | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | } catch (NumberFormatException ex) { | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Begins listening for change feed events asynchronously.
 * <p>
 * On the first call the partition manager is lazily assembled (collection
 * properties, lease store manager, partition manager) and then started;
 * subsequent calls simply start the already-built manager.
 *
 * @return a {@link Mono} that completes once the processor has started.
 */
@Override
public Mono<Void> start() {
    if (this.partitionManager != null) {
        return this.partitionManager.start();
    }
    return this.initializeCollectionPropertiesForBuild()
        .flatMap(ignored -> this.getLeaseStoreManager().flatMap(this::buildPartitionManager))
        .flatMap(builtManager -> {
            this.partitionManager = builtManager;
            return this.partitionManager.start();
        });
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a {@link Mono} that completes once the processor has stopped.
 * @throws IllegalStateException if the processor was never fully started.
 */
@Override
public Mono<Void> stop() {
    PartitionManager manager = this.partitionManager;
    if (manager == null || !manager.isRunning()) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return manager.stop();
}
/**
 * Reports whether the change feed processor is currently active.
 *
 * @return true when a partition manager exists and is running.
 */
@Override
public boolean isStarted() {
    PartitionManager manager = this.partitionManager;
    if (manager == null) {
        return false;
    }
    return manager.isRunning();
}
/**
 * Returns the current owner (host) and an approximation of the difference between the last processed item (defined
 * by the state of the feed container) and the latest change in the container for each partition (lease
 * document).
 * <p>
 * An empty map will be returned if the processor was not started or no lease documents matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
 * lag, asynchronously.
 */
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
    // Without a lease store and feed client there is nothing to measure;
    // short-circuit with an empty map.
    Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        return Mono.just(earlyResult);
    }
    // One change-feed probe per lease; the per-lease results are folded into a map below.
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // NOTE(review): this statement is truncated in this copy of the file
            // ("options = );" is not valid Java). The per-lease query producing the
            // (key, lag) Pair is missing here — restore it from the original source.
            ChangeFeedOptions options = );
        })
        .collectList()
        .map(valueList -> {
            // Fold per-lease (owner/token key -> estimated lag) pairs into the result map.
            Map<String, Integer> result = new ConcurrentHashMap<>();
            for (Pair<String, Integer> pair : valueList) {
                result.put(pair.getKey(), pair.getValue());
            }
            return result;
        });
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
    List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
    // Not started (or not fully configured): report an empty, unmodifiable list.
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        return Mono.just(Collections.unmodifiableList(earlyResult));
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // Probe one document per lease, resuming from the lease's continuation token.
            ChangeFeedOptions options = new ChangeFeedOptions()
                .setMaxItemCount(1)
                .setPartitionKeyRangeId(lease.getLeaseToken())
                .setStartFromBeginning(true)
                .setRequestContinuation(lease.getContinuationToken());
            return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
                .take(1)
                .map(feedResponse -> {
                    // Extract the latest LSN from the session token.
                    // NOTE(review): substring(indexOf(...)) keeps the ':' separator at the
                    // front of parsedSessionToken — confirm that is the intended parse.
                    String sessionTokenLsn = feedResponse.getSessionToken();
                    String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
                    String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
                    String latestLsn = segments[0];
                    if (segments.length >= 2) {
                        // Multi-segment token: the LSN is the second segment.
                        latestLsn = segments[1];
                    }
                    ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                        .setHostName(lease.getOwner())
                        .setLeaseToken(lease.getLeaseToken());
                    // No pending results: the worker is caught up (lag 0 at the latest LSN).
                    if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                        changeFeedProcessorState.setEstimatedLag(0)
                            .setContinuationToken(latestLsn);
                        return changeFeedProcessorState;
                    }
                    changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
                    Integer currentLsn = 0;
                    Integer estimatedLag = 0;
                    try {
                        // Lag = (latest LSN in the container) - (LSN of the next unprocessed doc) + 1.
                        currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
                        estimatedLag = Integer.valueOf(latestLsn);
                        estimatedLag = estimatedLag - currentLsn + 1;
                        changeFeedProcessorState.setEstimatedLag(estimatedLag);
                    } catch (NumberFormatException ex) {
                        // Non-numeric LSN: mark the lag as unknown rather than failing the stream.
                        logger.warn("Unexpected Cosmos LSN found", ex);
                        changeFeedProcessorState.setEstimatedLag(-1);
                    }
                    return changeFeedProcessorState;
                });
        })
        .collectList()
        .map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName unique name for this host; each host sharing the lease container must use a distinct name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
    this.hostName = hostName;
    return this;
}

/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the monitored container; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code feedDocumentClient} is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (null == feedDocumentClient) {
        throw new IllegalArgumentException("feedContextClient");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}

/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the processor options; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code changeFeedProcessorOptions} is null.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (null == changeFeedProcessorOptions) {
        throw new IllegalArgumentException("changeFeedProcessorOptions");
    }
    this.changeFeedProcessorOptions = changeFeedProcessorOptions;
    return this;
}

/**
 * Sets the {@link ChangeFeedObserverFactory} used to generate {@link ChangeFeedObserver} instances.
 *
 * @param observerFactory the factory; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code observerFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (null == observerFactory) {
        throw new IllegalArgumentException("observerFactory");
    }
    this.observerFactory = observerFactory;
    return this;
}

/**
 * Registers a {@link ChangeFeedObserver} class; a default factory will instantiate it per partition.
 *
 * @param type the observer implementation class; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code type} is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (null == type) {
        throw new IllegalArgumentException("type");
    }
    this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
    return this;
}

/**
 * Convenience registration of a change-handler callback, wrapped in a default observer factory.
 *
 * @param consumer callback invoked with each batch of changed documents.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    DefaultObserverFactory factory = new DefaultObserverFactory(consumer);
    return this.observerFactory(factory);
}

/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    this.databaseResourceId = databaseResourceId;
    return this;
}

/**
 * Sets the collection resource ID of the monitored collection.
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the container holding lease documents; must not be null and must
 *                    have content response on write enabled. A consistency level of at
 *                    least SESSION is expected; weaker levels only produce a warning.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code leaseClient} is null or content response
 *         on write is disabled for it.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease bookkeeping reads documents back after writes, so write responses must carry content.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Fix: the original message read "consistency level setting are less then expected".
        logger.warn("leaseClient consistency level is lower than the expected level (SESSION)");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} used when distributing leases across hosts.
 *
 * @param loadBalancingStrategy the strategy; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (null == loadBalancingStrategy) {
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}

/**
 * Sets the {@link PartitionProcessorFactory} used to create {@link PartitionProcessor} instances.
 *
 * @param partitionProcessorFactory the factory; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (null == partitionProcessorFactory) {
        throw new IllegalArgumentException("partitionProcessorFactory");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}

/**
 * Sets the {@link LeaseStoreManager} used to manage leases.
 *
 * @param leaseStoreManager the manager; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (null == leaseStoreManager) {
        throw new IllegalArgumentException("leaseStoreManager");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}

/**
 * Sets the {@link HealthMonitor} used to report unhealthy partition handling.
 *
 * @param healthMonitor the monitor; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (null == healthMonitor) {
        throw new IllegalArgumentException("healthMonitor");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Validates the configured state and returns this builder as a ready {@link ChangeFeedProcessor}.
 *
 * @return this instance, usable as a {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException when the host name or the observer was not configured.
 */
public ChangeFeedProcessor build() {
    if (null == this.hostName) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (null == this.observerFactory) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // An acquire interval below the library default is allowed, but flagged.
    boolean acquireIntervalTooLow = this.changeFeedProcessorOptions != null
        && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0;
    if (acquireIntervalTooLow) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (null == this.scheduler) {
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
/** Creates a builder with default batching and parallelism settings. */
public ChangeFeedProcessorBuilderImpl() {
    // These mirror the field initializers; kept for parity with the original.
    this.degreeOfParallelism = 25;
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
}

/** Creates a builder that wraps an already-constructed partition manager. */
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
// Applies default options if none were supplied, then caches the database and
// container identifiers used later when composing the lease prefix.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map( databaseResourceResponse -> {
            // NOTE(review): getId() is the user-facing id; the field is named
            // "databaseResourceId" — confirm the id (not the resource id) is intended.
            this.databaseResourceId = databaseResourceResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        .flatMap( id -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(documentCollectionResourceResponse -> {
                // Same caveat as above for the container id.
                this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
                return this;
            }));
}
// Returns the configured lease store manager, building (and caching) one from the
// lease container on first use. The lease container must be partitioned by "/id".
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                // The lease container must have exactly one partition key path: "/id".
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so subsequent calls skip the container round-trip.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the lease-document prefix:
 * {@code <optionsPrefix><serviceHost>_<databaseResourceId>_<collectionResourceId>}.
 *
 * @return the prefix that scopes lease documents to this monitored container.
 */
private String getLeasePrefix() {
    String prefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (prefix == null) {
        prefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return prefix
        + serviceEndpoint.getHost()
        + "_" + this.databaseResourceId
        + "_" + this.collectionResourceId;
}
// Wires together the partition-processing pipeline: synchronizer, bootstrapper,
// supervisor factory, controller (with health monitoring), load balancer, and
// finally the partition manager that start()/stop() operate on.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    // Wrap the user observer factory so checkpoints are taken per the default frequency.
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // Fall back to the default processor factory when none was configured.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    if (this.loadBalancingStrategy == null) {
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    // NOTE(review): the manager receives the UNdecorated partitionController while the
    // load balancer gets the health-monitoring decorator (partitionController2) —
    // confirm this asymmetry is intentional.
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
// Shuts the processor down by subscribing to stop() on an elastic scheduler.
// NOTE(review): stop() throws IllegalStateException synchronously when the processor
// never fully started, so closing an unstarted instance propagates that exception.
@Override
public void close() {
    this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
yep, makes sense since these are also shared in the code above. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | final String segmentSeparator = " | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
// Bootstrapper retry sleep and lock durations.
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
// --- builder-configured state ---
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
// Set lazily on first start(); volatile so start/stop/isStarted see it across threads.
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Starts the processor, lazily building the partition manager on first use.
 *
 * @return a {@link Mono} completing once listening has begun.
 */
@Override
public Mono<Void> start() {
    PartitionManager existing = this.partitionManager;
    if (existing != null) {
        return existing.start();
    }
    return this.initializeCollectionPropertiesForBuild()
        .flatMap(unused -> this.getLeaseStoreManager().flatMap(this::buildPartitionManager))
        .flatMap(created -> {
            this.partitionManager = created;
            return this.partitionManager.start();
        });
}
/**
 * Stops the processor asynchronously.
 *
 * @return a {@link Mono} completing once the processor has stopped.
 * @throws IllegalStateException if the processor never fully started.
 */
@Override
public Mono<Void> stop() {
    PartitionManager active = this.partitionManager;
    if (active == null || !active.isRunning()) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return active.stop();
}
/**
 * Indicates whether this change feed processor is currently running.
 *
 * @return true when a partition manager has been built and is running.
 */
@Override
public boolean isStarted() {
    PartitionManager active = this.partitionManager;
    return active != null && active.isRunning();
}
/**
 * Returns the current owner (host) and an approximation of the difference between the last processed item (defined
 * by the state of the feed container) and the latest change in the container for each partition (lease
 * document).
 * <p>
 * An empty map will be returned if the processor was not started or no lease documents matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
 * lag, asynchronously.
 */
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
    // Without a lease store and feed client there is nothing to measure.
    Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        return Mono.just(earlyResult);
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // NOTE(review): this statement is truncated in this copy of the file
            // ("options = );" is not valid Java). The per-lease query producing the
            // (key, lag) Pair is missing here — restore it from the original source.
            ChangeFeedOptions options = );
        })
        .collectList()
        .map(valueList -> {
            // Fold per-lease (owner/token key -> estimated lag) pairs into the result map.
            Map<String, Integer> result = new ConcurrentHashMap<>();
            for (Pair<String, Integer> pair : valueList) {
                result.put(pair.getKey(), pair.getValue());
            }
            return result;
        });
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
    List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        return Mono.just(earlyResult);
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // Probe one document per lease, resuming from the lease's continuation token.
            ChangeFeedOptions options = new ChangeFeedOptions()
                .setMaxItemCount(1)
                .setPartitionKeyRangeId(lease.getLeaseToken())
                .setStartFromBeginning(true)
                .setRequestContinuation(lease.getContinuationToken());
            return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
                .take(1)
                .map(feedResponse -> {
                    final String pkRangeIdSeparator = ":";
                    // NOTE(review): this literal appears truncated by extraction — the
                    // separator value is not visible here; restore from the original source.
                    final String segmentSeparator = "
                    final String lsnPropertyName = "_lsn";
                    final String tsPropertyName = "_ts";
                    // Extract the latest LSN from the session token.
                    // NOTE(review): substring(indexOf(...)) keeps the ':' separator at the
                    // front of parsedSessionToken — confirm that is the intended parse.
                    String sessionTokenLsn = feedResponse.getSessionToken();
                    String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
                    String[] segments = parsedSessionToken.split(segmentSeparator);
                    String latestLsn = segments[0];
                    if (segments.length >= 2) {
                        // Multi-segment token: the LSN is the second segment.
                        latestLsn = segments[1];
                    }
                    ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                        .setId(lease.getId())
                        .setHostName(lease.getOwner())
                        .setLeaseToken(lease.getLeaseToken())
                        .setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
                    // No pending results: the worker is caught up (lag 0 at the latest LSN).
                    if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                        changeFeedProcessorState.setEstimatedLag(0);
                        changeFeedProcessorState.setContinuationToken(latestLsn);
                        return changeFeedProcessorState;
                    }
                    changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
                    try {
                        // NOTE(review): Cosmos "_ts" is documented as epoch SECONDS, but it is
                        // passed to Instant.ofEpochMilli here — confirm the intended unit.
                        changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
                            feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
                    } catch (NumberFormatException ex) {
                        logger.warn("Unexpected Cosmos _ts found", ex);
                        changeFeedProcessorState.setContinuationTokenTimestamp(null);
                    }
                    Integer currentLsn = 0;
                    Integer estimatedLag = 0;
                    try {
                        // Lag = (latest LSN in the container) - (LSN of the next unprocessed doc) + 1.
                        currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
                        estimatedLag = Integer.valueOf(latestLsn);
                        estimatedLag = estimatedLag - currentLsn + 1;
                        changeFeedProcessorState.setEstimatedLag(estimatedLag);
                    } catch (NumberFormatException ex) {
                        // Non-numeric LSN: mark the lag as unknown rather than failing the stream.
                        logger.warn("Unexpected Cosmos LSN found", ex);
                        changeFeedProcessorState.setEstimatedLag(-1);
                    }
                    return changeFeedProcessorState;
                });
        })
        .collectList()
        .map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName unique name for this host; each host sharing the lease container must use a distinct name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
    this.hostName = hostName;
    return this;
}

/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the monitored container; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code feedDocumentClient} is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (null == feedDocumentClient) {
        throw new IllegalArgumentException("feedContextClient");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}

/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the processor options; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code changeFeedProcessorOptions} is null.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (null == changeFeedProcessorOptions) {
        throw new IllegalArgumentException("changeFeedProcessorOptions");
    }
    this.changeFeedProcessorOptions = changeFeedProcessorOptions;
    return this;
}

/**
 * Sets the {@link ChangeFeedObserverFactory} used to generate {@link ChangeFeedObserver} instances.
 *
 * @param observerFactory the factory; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code observerFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (null == observerFactory) {
        throw new IllegalArgumentException("observerFactory");
    }
    this.observerFactory = observerFactory;
    return this;
}

/**
 * Registers a {@link ChangeFeedObserver} class; a default factory will instantiate it per partition.
 *
 * @param type the observer implementation class; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code type} is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (null == type) {
        throw new IllegalArgumentException("type");
    }
    this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
    return this;
}

/**
 * Convenience registration of a change-handler callback, wrapped in a default observer factory.
 *
 * @param consumer callback invoked with each batch of changed documents.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    DefaultObserverFactory factory = new DefaultObserverFactory(consumer);
    return this.observerFactory(factory);
}

/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    this.databaseResourceId = databaseResourceId;
    return this;
}

/**
 * Sets the collection resource ID of the monitored collection.
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the container holding lease documents; must not be null and must
 *                    have content response on write enabled. A consistency level of at
 *                    least SESSION is expected; weaker levels only produce a warning.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code leaseClient} is null or content response
 *         on write is disabled for it.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease bookkeeping reads documents back after writes, so write responses must carry content.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Fix: the original message read "consistency level setting are less then expected".
        logger.warn("leaseClient consistency level is lower than the expected level (SESSION)");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} used when distributing leases across hosts.
 *
 * @param loadBalancingStrategy the strategy; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (null == loadBalancingStrategy) {
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}

/**
 * Sets the {@link PartitionProcessorFactory} used to create {@link PartitionProcessor} instances.
 *
 * @param partitionProcessorFactory the factory; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (null == partitionProcessorFactory) {
        throw new IllegalArgumentException("partitionProcessorFactory");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}

/**
 * Sets the {@link LeaseStoreManager} used to manage leases.
 *
 * @param leaseStoreManager the manager; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (null == leaseStoreManager) {
        throw new IllegalArgumentException("leaseStoreManager");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}

/**
 * Sets the {@link HealthMonitor} used to report unhealthy partition handling.
 *
 * @param healthMonitor the monitor; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (null == healthMonitor) {
        throw new IllegalArgumentException("healthMonitor");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Validates required configuration and returns this builder as a ready {@link ChangeFeedProcessor}.
 *
 * @return this instance, usable as a {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException when the host name or the observer was not configured.
 */
public ChangeFeedProcessor build() {
    if (null == this.hostName) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (null == this.observerFactory) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    if (null == this.scheduler) {
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
I'll keep it as such, it is easier to read the code. The property getter does not execute any particular complex operation, it just returns an internal member. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) { | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code hostName} is null.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
    // Fail fast like the other setters; otherwise a null host name only surfaces
    // later in build() as "Host name was not specified".
    if (hostName == null) {
        throw new IllegalArgumentException("hostName cannot be null");
    }
    this.hostName = hostName;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code feedDocumentClient} is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient == null) {
        // The original message said "feedContextClient" (the field), which misleads
        // callers about which argument was invalid.
        throw new IllegalArgumentException("feedDocumentClient cannot be null");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the change feed processor options to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code changeFeedProcessorOptions} is null.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (changeFeedProcessorOptions == null) {
        throw new IllegalArgumentException("changeFeedProcessorOptions cannot be null");
    }
    this.changeFeedProcessorOptions = changeFeedProcessorOptions;
    return this;
}
/**
 * Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver} instances.
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code observerFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (observerFactory == null) {
        throw new IllegalArgumentException("observerFactory cannot be null");
    }
    this.observerFactory = observerFactory;
    return this;
}
/**
 * Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
 *
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code type} is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (type == null) {
        throw new IllegalArgumentException("type cannot be null");
    }
    // Wrap the observer class in the default reflective factory.
    this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
    return this;
}
/**
 * Registers a consumer that is invoked with each batch of change documents.
 *
 * @param consumer the callback receiving each batch of changed documents.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    // Delegate to observerFactory(...) so its validation applies uniformly.
    DefaultObserverFactory defaultFactory = new DefaultObserverFactory(consumer);
    return this.observerFactory(defaultFactory);
}
/**
 * Sets the database resource ID of the monitored collection.
 * <p>
 * No validation is performed here; note that the builder also overwrites this value
 * from the service response inside initializeCollectionPropertiesForBuild().
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    this.databaseResourceId = databaseResourceId;
    return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 * <p>
 * No validation is performed here; note that the builder also overwrites this value
 * from the service response inside initializeCollectionPropertiesForBuild().
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseClient} is null or does not have
 *     content-response-on-write enabled (lease bookkeeping requires the written document back).
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient cannot be null");
    }
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    // Weaker-than-SESSION consistency can make lease reads stale; warn but allow it.
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Original message was ungrammatical ("setting are less then expected").
        logger.warn("leaseClient consistency level setting is lower than the expected SESSION level");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy == null) {
        throw new IllegalArgumentException("loadBalancingStrategy cannot be null");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory == null) {
        throw new IllegalArgumentException("partitionProcessorFactory cannot be null");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager == null) {
        throw new IllegalArgumentException("leaseStoreManager cannot be null");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situations.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor == null) {
        throw new IllegalArgumentException("healthMonitor cannot be null");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if the host name or the observer was not specified.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Consistency fix: the other copy of this builder in this file performs this
    // sanity warning; a too-small acquire interval causes excessive lease churn.
    if (this.changeFeedProcessorOptions != null
        && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (this.scheduler == null) {
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
// Default constructor: seeds the tuning knobs with their defaults (partition-query
// batch size of 100 and a change-feed read parallelism of 25).
public ChangeFeedProcessorBuilderImpl() {
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25;
}
// Injects a pre-built PartitionManager; start() then uses it directly and skips
// the whole metadata/lease-store/partition-manager build pipeline.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
/**
 * Resolves and caches the monitored database and container resource ids on this
 * builder, defaulting the processor options first when none were supplied.
 *
 * @return a {@link Mono} emitting this builder once both ids are resolved.
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    // Fall back to default options when the caller supplied none.
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    // Read the database id first, then the container id, caching both as we go.
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map(dbResponse -> {
            this.databaseResourceId = dbResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        .flatMap(dbId -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(containerResponse -> {
                this.collectionResourceId = containerResponse.getProperties().getId();
                return this;
            }));
}
// Lazily builds (and caches) the LeaseStoreManager used for lease bookkeeping.
// Before constructing it, validates that the lease container is partitioned on
// exactly one path, "/id", which the lease documents rely on.
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                // Container counts as partitioned only when a non-empty partition key path list exists.
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                // Reject anything other than a single "/id" partition key.
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so later calls reuse the same manager instance.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the lease document prefix: the user-configured prefix (empty when unset)
 * followed by the service host, database resource id and collection resource id,
 * joined with underscores.
 *
 * @return the prefix identifying this processor's lease documents.
 */
private String getLeasePrefix() {
    String configuredPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    String effectivePrefix = (configuredPrefix == null) ? "" : configuredPrefix;
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return String.format(
        "%s%s_%s_%s",
        effectivePrefix,
        serviceEndpoint.getHost(),
        this.databaseResourceId,
        this.collectionResourceId);
}
// Wires together the change-feed runtime for this host, in dependency order:
// synchronizer -> bootstrapper -> supervisor factory -> controller -> load balancer -> manager.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    // Wrap the user-supplied observer factory so checkpoints are recorded automatically.
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // Fall back to the default partition processor factory when none was injected.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    // Default load balancing splits partitions equally across hosts.
    if (this.loadBalancingStrategy == null) {
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    // NOTE(review): the manager receives the undecorated partitionController while the load
    // balancer gets the health-monitoring decorator (partitionController2) — confirm this
    // asymmetry is intentional and not a missed substitution.
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
// Fire-and-forget shutdown: stop() is subscribed on an elastic scheduler thread
// so close() itself does not block.
@Override
public void close() {
    Mono<Void> shutdown = this.stop();
    shutdown.subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "#";
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    // Already bootstrapped (e.g. injected via constructor): just start it.
    if (this.partitionManager != null) {
        return this.partitionManager.start();
    }
    // First start: resolve collection metadata, build the lease store manager and
    // the partition manager, cache the manager, then start it.
    return this.initializeCollectionPropertiesForBuild()
        .flatMap(ignored -> this.getLeaseStoreManager()
            .flatMap(this::buildPartitionManager))
        .flatMap(builtManager -> {
            this.partitionManager = builtManager;
            return this.partitionManager.start();
        });
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 * @throws IllegalStateException if the processor never fully started.
 */
@Override
public Mono<Void> stop() {
    if (this.partitionManager != null && this.partitionManager.isRunning()) {
        return this.partitionManager.stop();
    }
    // Synchronous throw (not Mono.error) — preserved from the original contract.
    throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    // Snapshot the volatile field once so both checks see the same instance.
    PartitionManager currentManager = this.partitionManager;
    return currentManager != null && currentManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
    List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        // Processor not started yet: nothing to report.
        return Mono.just(Collections.unmodifiableList(earlyResult));
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // Read at most one change per lease, resuming from the lease's continuation,
            // purely to obtain session-token / LSN bookkeeping for that partition.
            ChangeFeedOptions options = new ChangeFeedOptions()
                .setMaxItemCount(1)
                .setPartitionKeyRangeId(lease.getLeaseToken())
                .setStartFromBeginning(true)
                .setRequestContinuation(lease.getContinuationToken());
            return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
                .take(1)
                .map(feedResponse -> {
                    // Session token shape is "<pkRangeId>:<segments>"; the latest LSN is the
                    // last SEGMENT_SEPARATOR-delimited segment when there are two segments.
                    String sessionTokenLsn = feedResponse.getSessionToken();
                    String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
                    String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
                    String latestLsn = segments[0];
                    if (segments.length >= 2) {
                        latestLsn = segments[1];
                    }
                    ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                        .setHostName(lease.getOwner())
                        .setLeaseToken(lease.getLeaseToken());
                    if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                        // No pending changes: fully caught up for this lease.
                        changeFeedProcessorState.setEstimatedLag(0)
                            .setContinuationToken(latestLsn);
                        return changeFeedProcessorState;
                    }
                    changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
                    Integer currentLsn = 0;
                    Integer estimatedLag = 0;
                    try {
                        // Lag = latest LSN - first unprocessed LSN + 1.
                        currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
                        estimatedLag = Integer.valueOf(latestLsn);
                        estimatedLag = estimatedLag - currentLsn + 1;
                        changeFeedProcessorState.setEstimatedLag(estimatedLag);
                    } catch (NumberFormatException ex) {
                        // -1 signals "lag unknown" when the LSN is not numeric.
                        logger.warn("Unexpected Cosmos LSN found", ex);
                        changeFeedProcessorState.setEstimatedLag(-1);
                    }
                    return changeFeedProcessorState;
                });
        })
        .collectList()
        .map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
In general I prefer the same "fluidity", especially after invoking the constructor :-) | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "#";
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | changeFeedProcessorState.setContinuationToken(latestLsn); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    // A non-null partition manager means the pipeline was already assembled; just (re)start it.
    if (this.partitionManager != null) {
        return this.partitionManager.start();
    }
    // First start: resolve collection metadata, obtain the lease store manager,
    // assemble the partition manager, then begin processing.
    return this.initializeCollectionPropertiesForBuild()
        .flatMap(ignored -> this.getLeaseStoreManager())
        .flatMap(this::buildPartitionManager)
        .flatMap(builtManager -> {
            this.partitionManager = builtManager;
            return this.partitionManager.start();
        });
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> stop() {
    PartitionManager manager = this.partitionManager;
    // Stopping is only meaningful once start() has fully wired up the partition manager.
    if (manager == null || !manager.isRunning()) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return manager.stop();
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    PartitionManager manager = this.partitionManager;
    return manager == null ? false : manager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
// Not validated here; build() fails fast if the host name is still null.
this.hostName = hostName;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient != null) {
        // Wrap the container in the internal change-feed context client abstraction.
        this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
        return this;
    }
    throw new IllegalArgumentException("feedContextClient");
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
// If never called, initializeCollectionPropertiesForBuild() supplies default options at start().
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
 * Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (observerFactory != null) {
        this.observerFactory = observerFactory;
        return this;
    }
    throw new IllegalArgumentException("observerFactory");
}
/**
 * Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (type != null) {
        // Wrap the observer class in the reflective factory implementation.
        this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
        return this;
    }
    throw new IllegalArgumentException("type");
}
/**
* Sets a consumer to be invoked with each batch of changed documents.
* Convenience wrapper that installs a {@link DefaultObserverFactory} around the consumer.
*
* @param consumer the callback receiving one batch of changed documents.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
// NOTE(review): start() re-resolves this value in initializeCollectionPropertiesForBuild(),
// so an explicit value set here is overwritten once the processor starts.
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
// NOTE(review): start() re-resolves this value in initializeCollectionPropertiesForBuild(),
// so an explicit value set here is overwritten once the processor starts.
this.collectionResourceId = collectionResourceId;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseClient} is null or has content response
 *   on write disabled.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease bookkeeping reads documents back from write responses, so this setting is mandatory.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Weaker-than-SESSION consistency risks stale lease reads; warn but keep going.
        // (Fixed grammar of the original message: "setting are less then expected".)
        logger.warn("leaseClient consistency level setting is lower than the expected SESSION level");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy != null) {
        this.loadBalancingStrategy = loadBalancingStrategy;
        return this;
    }
    throw new IllegalArgumentException("loadBalancingStrategy");
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory != null) {
        this.partitionProcessorFactory = partitionProcessorFactory;
        return this;
    }
    throw new IllegalArgumentException("partitionProcessorFactory");
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
// Supplying a manager up front makes getLeaseStoreManager() skip the default lease-store bootstrap.
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor != null) {
        this.healthMonitor = healthMonitor;
        return this;
    }
    throw new IllegalArgumentException("healthMonitor");
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 */
public ChangeFeedProcessor build() {
    // Host name and observer are the only mandatory settings; everything else has a default.
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Fall back to the shared elastic scheduler when none was provided.
    this.scheduler = this.scheduler != null ? this.scheduler : Schedulers.elastic();
    return this;
}
// Default constructor; re-asserts the same defaults as the field initializers.
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
// Accepts a prebuilt partition manager; start() then skips collection/lease initialization.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
// Resolves and caches the database and collection ids of the monitored container;
// both feed into the lease prefix (see getLeasePrefix()). Also lazily defaults the
// processor options so start() works without an explicit options(...) call.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
// Cache the database id for lease-prefix construction.
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
// Cache the collection id for lease-prefix construction.
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
// Returns the configured lease store manager, building (and caching) the default
// one on first use. The default store requires the lease collection to be
// partitioned by exactly "/id".
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
// Validate the lease collection's partition key definition: exactly one path, "/id".
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so later calls (and getEstimatedLag/getCurrentState) reuse the same store.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
// Builds the lease-document namespace: (optional user prefix) + service host +
// database RID + collection RID, so multiple processors can share one lease
// collection without colliding.
private String getLeasePrefix() {
    String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    URI uri = this.feedContextClient.getServiceEndpoint();
    StringBuilder prefix = new StringBuilder();
    if (optionsPrefix != null) {
        prefix.append(optionsPrefix);
    }
    return prefix
        .append(uri.getHost()).append('_')
        .append(this.databaseResourceId).append('_')
        .append(this.collectionResourceId)
        .toString();
}
// Assembles the full processing pipeline around the given lease store manager:
// synchronizer -> bootstrapper -> supervisor factory -> controller (+ health
// monitoring decorator) -> load balancer -> partition manager.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
// Wrap the user observer factory so observers checkpoint automatically.
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// Prefer an explicitly supplied processor factory; otherwise use the default.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
// Default strategy: spread partitions evenly across hosts.
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
// NOTE(review): the load balancer below wraps the health-monitored controller, but the
// PartitionManager is constructed with the UNdecorated partitionController - confirm intended.
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
/**
 * Closes the processor, stopping it if it is currently running.
 * <p>
 * Unlike {@link #stop()}, which throws {@link IllegalStateException} when the processor has not
 * fully started, close() is a no-op in that case so it is always safe to call (e.g. from
 * try-with-resources).
 */
@Override
public void close() {
    // Guard: stop() throws when the partition manager is absent or not running.
    if (this.isStarted()) {
        this.stop().subscribeOn(Schedulers.elastic()).subscribe();
    }
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
// Processor not started / not fully configured: no worker state to report.
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
// Probe one document past the lease's checkpoint to learn the partition's position.
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
// The trailing session-token segment carries the newest LSN known to the service.
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
// Fully caught up: expose the service-side LSN as the continuation.
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
// Lag is inclusive of the probed document, hence the +1.
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
// Non-numeric LSN: report a sentinel instead of failing the whole query.
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
// Host name and observer are the only mandatory settings; everything else has a default.
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
// A very short acquire interval causes lease churn; warn but honor the caller's choice.
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
// Fall back to the shared elastic scheduler when none was provided.
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the prefix used to scope lease documents to this processor instance:
 * "{userPrefix}{endpointHost}_{databaseResourceId}_{collectionResourceId}".
 *
 * @return the lease prefix string.
 */
private String getLeasePrefix() {
    // Fall back to an empty prefix when the options do not specify one.
    String userPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (userPrefix == null) {
        userPrefix = "";
    }
    String endpointHost = this.feedContextClient.getServiceEndpoint().getHost();
    return userPrefix + endpointHost + "_" + this.databaseResourceId + "_" + this.collectionResourceId;
}
/**
 * Wires together the partition-processing pipeline: synchronizer, bootstrapper,
 * supervisor factory, controller (wrapped with health monitoring), load balancer,
 * and finally the {@link PartitionManager}.
 *
 * @param leaseStoreManager the lease store manager shared by all components.
 * @return a {@link Mono} emitting the assembled {@link PartitionManager}.
 */
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
// Observers are wrapped so checkpoints are taken at the configured frequency.
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// Use the caller-supplied processor factory when present, otherwise the default.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
// Default strategy splits partitions evenly across hosts.
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the load balancer receives the health-monitored decorator (partitionController2)
// while PartitionManagerImpl receives the undecorated controller -- confirm this asymmetry is intended.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
/**
 * Shuts the processor down by triggering {@link #stop()} on an elastic
 * scheduler, fire-and-forget style (errors surface via the reactive chain).
 */
@Override
public void close() {
    Mono<Void> shutdown = this.stop();
    shutdown.subscribeOn(Schedulers.elastic()).subscribe();
}
} |
The error, though unexpected (the _ts system property is an epoch timestamp), is not fatal (a log warning captures it). Setting the continuation token to "null" indicates that the document found is not valid; i.e. the CFP leases were initialized but no changes have yet been processed in this particular scope/partition. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
// Returns one ChangeFeedProcessorState per lease, probing each partition's feed
// position with a single-item read past the stored continuation token.
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
// Processor not fully wired up yet: report an empty, unmodifiable state list
// (consistent with the unmodifiable list produced by the main path below).
if (this.leaseStoreManager == null || this.feedContextClient == null) {
    return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
    .flatMap(lease -> {
        // Read at most one change past the lease's checkpoint to probe the feed position.
        ChangeFeedOptions options = new ChangeFeedOptions()
            .setMaxItemCount(1)
            .setPartitionKeyRangeId(lease.getLeaseToken())
            .setStartFromBeginning(true)
            .setRequestContinuation(lease.getContinuationToken());
        return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
            .take(1)
            .map(feedResponse -> {
                final String pkRangeIdSeparator = ":";
                // Restored: this literal was truncated in the source. Session token
                // segments are '#'-separated ("<pkRangeId>:<version>#<globalLsn>").
                final String segmentSeparator = "#";
                final String lsnPropertyName = "_lsn";
                final String tsPropertyName = "_ts";
                String sessionTokenLsn = feedResponse.getSessionToken();
                String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
                String[] segments = parsedSessionToken.split(segmentSeparator);
                String latestLsn = segments[0];
                if (segments.length >= 2) {
                    // "<version>#<lsn>" form: the LSN is the second segment.
                    latestLsn = segments[1];
                }
                ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                    .setId(lease.getId())
                    .setHostName(lease.getOwner())
                    .setLeaseToken(lease.getLeaseToken())
                    .setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
                if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                    // Nothing processed yet in this scope: zero lag, latest LSN as continuation.
                    changeFeedProcessorState.setEstimatedLag(0);
                    changeFeedProcessorState.setContinuationToken(latestLsn);
                    return changeFeedProcessorState;
                }
                changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
                try {
                    // NOTE(review): Cosmos _ts is epoch *seconds*; ofEpochMilli may mis-scale -- confirm.
                    changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
                        feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
                } catch (NumberFormatException ex) {
                    // Unexpected but not fatal; a null timestamp marks the document as not valid.
                    logger.warn("Unexpected Cosmos _ts found", ex);
                    changeFeedProcessorState.setContinuationTokenTimestamp(null);
                }
                Integer currentLsn = 0;
                Integer estimatedLag = 0;
                try {
                    currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
                    estimatedLag = Integer.valueOf(latestLsn);
                    estimatedLag = estimatedLag - currentLsn + 1;
                    changeFeedProcessorState.setEstimatedLag(estimatedLag);
                } catch (NumberFormatException ex) {
                    logger.warn("Unexpected Cosmos LSN found", ex);
                    // -1 signals "lag unknown" to callers.
                    changeFeedProcessorState.setEstimatedLag(-1);
                }
                return changeFeedProcessorState;
            });
    })
    .collectList()
    .map(Collections::unmodifiableList);
}
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
// NOTE(review): appears unused within this class -- confirm before removing.
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
// Bootstrapper retry pause and lock lease duration.
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
// Unique name of this host; mandatory before build().
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
// Resolved lazily from the service metadata; both feed the lease prefix.
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
// Non-null once start() has bootstrapped the processing pipeline.
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    if (this.partitionManager != null) {
        // Already bootstrapped: just start the existing partition manager.
        return partitionManager.start();
    }
    // First start: resolve collection metadata, build the lease store manager,
    // assemble the partition manager, then start it.
    return this.initializeCollectionPropertiesForBuild()
        .flatMap(ignored -> this.getLeaseStoreManager()
            .flatMap(this::buildPartitionManager))
        .flatMap(builtManager -> {
            this.partitionManager = builtManager;
            return this.partitionManager.start();
        });
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> stop() {
    PartitionManager manager = this.partitionManager;
    boolean fullyStarted = manager != null && manager.isRunning();
    if (!fullyStarted) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return manager.stop();
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    PartitionManager manager = this.partitionManager;
    if (manager == null) {
        return false;
    }
    return manager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
// NOTE(review): message names the field rather than the parameter ("feedDocumentClient") -- consider aligning.
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the change feed processor options to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
 * Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver} instances.
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
 * Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
/**
 * Convenience overload: registers a consumer invoked with each batch of changed documents.
 *
 * @param consumer callback receiving changed documents as {@link JsonNode} instances.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 * <p>
 * Normally resolved automatically during start(); use this to pre-seed the value.
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease bookkeeping relies on reading documents back after writes.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Grammar fixed in the warning message (was: "setting are less then expected").
        logger.warn("leaseClient consistency level settings are less than expected which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situations.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 */
public ChangeFeedProcessor build() {
    // Validate mandatory settings before handing the builder out as a processor.
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Default to an elastic scheduler when none was supplied.
    this.scheduler = (this.scheduler != null) ? this.scheduler : Schedulers.elastic();
    return this;
}
/**
 * Creates a builder with default batching/parallelism settings.
 * <p>
 * The previous explicit assignments were removed: the field initializers already
 * set queryPartitionsMaxBatchSize and degreeOfParallelism to these exact values.
 */
public ChangeFeedProcessorBuilderImpl() {
}

/**
 * Creates a builder around an already-constructed {@link PartitionManager}
 * (bypasses the bootstrap performed by start()).
 *
 * @param partitionManager the partition manager to reuse.
 */
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
/**
 * Lazily resolves the database and container ids of the monitored collection.
 * <p>
 * Falls back to default {@link ChangeFeedProcessorOptions} when none were supplied,
 * then reads database and container metadata to capture their ids (both feed the
 * lease prefix, see getLeasePrefix()).
 *
 * @return a {@link Mono} emitting this builder once both ids have been resolved.
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
// Default the options so later wiring never has to null-check them.
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
// Cache the database id for lease-prefix construction.
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
// Cache the monitored container id as well, then emit the builder itself.
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
/**
 * Returns the lease store manager, building (and caching) one on first use.
 * <p>
 * The lease container is validated to be partitioned on exactly "/id" before
 * the manager is built; any other partition key definition is rejected.
 *
 * @return a {@link Mono} emitting the (possibly newly built) {@link LeaseStoreManager}.
 */
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
// A usable lease container must define at least one partition key path.
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
// Reject anything that is not partitioned on exactly "/id".
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so subsequent calls return the same manager instance.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the prefix used to scope lease documents to this processor instance:
 * "{userPrefix}{endpointHost}_{databaseResourceId}_{collectionResourceId}".
 *
 * @return the lease prefix string.
 */
private String getLeasePrefix() {
    // Fall back to an empty prefix when the options do not specify one.
    String userPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (userPrefix == null) {
        userPrefix = "";
    }
    String endpointHost = this.feedContextClient.getServiceEndpoint().getHost();
    return userPrefix + endpointHost + "_" + this.databaseResourceId + "_" + this.collectionResourceId;
}
/**
 * Wires together the partition-processing pipeline: synchronizer, bootstrapper,
 * supervisor factory, controller (wrapped with health monitoring), load balancer,
 * and finally the {@link PartitionManager}.
 *
 * @param leaseStoreManager the lease store manager shared by all components.
 * @return a {@link Mono} emitting the assembled {@link PartitionManager}.
 */
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
// Observers are wrapped so checkpoints are taken at the configured frequency.
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// Use the caller-supplied processor factory when present, otherwise the default.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
// Default strategy splits partitions evenly across hosts.
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the load balancer receives the health-monitored decorator (partitionController2)
// while PartitionManagerImpl receives the undecorated controller -- confirm this asymmetry is intended.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
/**
 * Shuts the processor down by triggering {@link #stop()} on an elastic
 * scheduler, fire-and-forget style (errors surface via the reactive chain).
 */
@Override
public void close() {
    Mono<Void> shutdown = this.stop();
    shutdown.subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    // Convenience overload: build a reflective factory around the observer class.
    if (type != null) {
        this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
        return this;
    }
    throw new IllegalArgumentException("type");
}
// Convenience entry point: wraps a plain consumer of change batches in a
// DefaultObserverFactory so callers need not implement ChangeFeedObserverFactory.
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
// Normally discovered in initializeCollectionPropertiesForBuild(); this setter overrides it.
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
// Normally discovered in initializeCollectionPropertiesForBuild(); this setter overrides it.
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease management requires write responses to carry document content.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    // Weaker-than-SESSION consistency is tolerated but warned about.
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Fix: grammar of the warning ("setting are less then" -> "setting is less than").
        logger.warn("leaseClient consistency level setting is less than expected which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    // If none is supplied, build() falls back to EqualPartitionsBalancingStrategy.
    if (loadBalancingStrategy != null) {
        this.loadBalancingStrategy = loadBalancingStrategy;
        return this;
    }
    throw new IllegalArgumentException("loadBalancingStrategy");
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    // If none is supplied, buildPartitionManager() creates a PartitionProcessorFactoryImpl.
    if (partitionProcessorFactory != null) {
        this.partitionProcessorFactory = partitionProcessorFactory;
        return this;
    }
    throw new IllegalArgumentException("partitionProcessorFactory");
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    // A pre-built manager short-circuits the discovery path in getLeaseStoreManager().
    if (leaseStoreManager != null) {
        this.leaseStoreManager = leaseStoreManager;
        return this;
    }
    throw new IllegalArgumentException("leaseStoreManager");
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    // If none is supplied, buildPartitionManager() falls back to TraceHealthMonitor.
    if (healthMonitor != null) {
        this.healthMonitor = healthMonitor;
        return this;
    }
    throw new IllegalArgumentException("healthMonitor");
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
// Host name and observer are the only hard requirements; everything else has defaults.
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
// Warn (but allow) lease acquire intervals shorter than the library default.
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
// The builder itself implements ChangeFeedProcessor, so it returns itself.
return this;
}
// Default constructor.
// NOTE(review): both assignments repeat the field initializers and look redundant — confirm.
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
// Constructor taking a pre-built PartitionManager; start()/stop() then delegate to it
// directly instead of building one from the other builder settings.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
// Resolves databaseResourceId and collectionResourceId by reading the monitored
// database and container, defaulting the options first. Order-sensitive reactive
// chain: the database read must complete before the container read.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
// Side effect: cache the database id for the lease-prefix computation.
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
// Side effect: cache the container id for the lease-prefix computation.
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
// Returns the configured LeaseStoreManager, or lazily builds one against the lease
// container. The lease container must be partitioned on "/id"; anything else is rejected.
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
// Exactly one partition-key path, and it must be "/id".
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so subsequent calls return the same manager without re-reading settings.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
// Lease prefix = [user-configured prefix] + endpoint host + "_" + databaseResourceId
// + "_" + collectionResourceId; scopes lease documents to this feed container.
private String getLeasePrefix() {
    String userPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (userPrefix == null) {
        userPrefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return userPrefix
        + serviceEndpoint.getHost()
        + "_" + this.databaseResourceId
        + "_" + this.collectionResourceId;
}
// Wires the full processing pipeline: synchronizer -> bootstrapper -> supervisor
// factory -> controller (+ health decorator) -> load balancer -> partition manager.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
// Wrap the user observer factory so checkpoints are written per CheckpointFrequency.
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// User-supplied processor factory wins; otherwise the default implementation.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the undecorated partitionController (not partitionController2) is passed
// to PartitionManagerImpl, so health monitoring only applies via the load balancer —
// confirm this is intentional.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
// Fire-and-forget shutdown on an elastic scheduler; does not block the caller.
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
yep, I've updated the doc to capture this is a read only list. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
    // Returns a read-only list with one ChangeFeedProcessorState per lease document.
    List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        // Processor not started yet: nothing to inspect.
        return Mono.just(earlyResult);
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // Query a single change per partition, resuming from the lease's continuation.
            ChangeFeedOptions options = new ChangeFeedOptions()
                .setMaxItemCount(1)
                .setPartitionKeyRangeId(lease.getLeaseToken())
                .setStartFromBeginning(true)
                .setRequestContinuation(lease.getContinuationToken());
            return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
                .take(1)
                .map(feedResponse -> {
                    final String pkRangeIdSeparator = ":";
                    // Fix: this literal was truncated in the file; session tokens separate
                    // the global/local LSN segments with '#'.
                    final String segmentSeparator = "#";
                    final String lsnPropertyName = "_lsn";
                    final String tsPropertyName = "_ts";
                    // Session token shape assumed "<pkRangeId>:<lsn>[#<localLsn>]" — TODO confirm.
                    String sessionTokenLsn = feedResponse.getSessionToken();
                    String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
                    String[] segments = parsedSessionToken.split(segmentSeparator);
                    String latestLsn = segments[0];
                    if (segments.length >= 2) {
                        // Prefer the local (second) segment when present.
                        latestLsn = segments[1];
                    }
                    ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                        .setId(lease.getId())
                        .setHostName(lease.getOwner())
                        .setLeaseToken(lease.getLeaseToken())
                        .setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
                    if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                        // Fully caught up: no pending changes for this partition.
                        changeFeedProcessorState.setEstimatedLag(0);
                        changeFeedProcessorState.setContinuationToken(latestLsn);
                        return changeFeedProcessorState;
                    }
                    changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
                    try {
                        // NOTE(review): Cosmos "_ts" is documented as epoch seconds, but this
                        // feeds it to ofEpochMilli — confirm the intended unit.
                        changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
                            feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
                    } catch (NumberFormatException ex) {
                        logger.warn("Unexpected Cosmos _ts found", ex);
                        changeFeedProcessorState.setContinuationTokenTimestamp(null);
                    }
                    Integer currentLsn = 0;
                    Integer estimatedLag = 0;
                    try {
                        currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
                        estimatedLag = Integer.valueOf(latestLsn);
                        // +1 because the item just read is itself still unprocessed.
                        estimatedLag = estimatedLag - currentLsn + 1;
                        changeFeedProcessorState.setEstimatedLag(estimatedLag);
                    } catch (NumberFormatException ex) {
                        logger.warn("Unexpected Cosmos LSN found", ex);
                        // -1 signals "lag unknown" to callers.
                        changeFeedProcessorState.setEstimatedLag(-1);
                    }
                    return changeFeedProcessorState;
                });
        })
        .collectList()
        .map(Collections::unmodifiableList);
} | .map(Collections::unmodifiableList); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
also fixed the empty list early returned case (thanks for catching that up). | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | .map(Collections::unmodifiableList); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy == null) {
        // Descriptive message instead of echoing only the bare parameter name.
        throw new IllegalArgumentException("loadBalancingStrategy cannot be null");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}

/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory == null) {
        throw new IllegalArgumentException("partitionProcessorFactory cannot be null");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}

/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager == null) {
        throw new IllegalArgumentException("leaseStoreManager cannot be null");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}

/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor == null) {
        throw new IllegalArgumentException("healthMonitor cannot be null");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Validates the configuration and returns this instance as the {@link ChangeFeedProcessor}.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if the host name or the observer factory was not set.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Consistency with the other build() overload in this file: warn when the lease
    // acquire interval is set below the default, since a too-aggressive interval
    // increases lease-store traffic.
    if (this.changeFeedProcessorOptions != null
        && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (this.scheduler == null) {
        // Default scheduler for potentially blocking/long-running work.
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
// Default constructor: applies the default partition-query batch size and degree of
// parallelism for change-feed partition enumeration.
public ChangeFeedProcessorBuilderImpl() {
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25;
}
// Constructor that injects a pre-built PartitionManager; start()/stop() then operate on
// it directly instead of assembling one. NOTE(review): presumably a test hook — confirm.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
/**
 * Lazily creates default {@link ChangeFeedProcessorOptions} if none were supplied, then
 * resolves and caches the database and collection resource IDs of the monitored (feed)
 * container on this builder.
 *
 * @return a Mono emitting this builder once both resource IDs have been populated.
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map( databaseResourceResponse -> {
            // Cache the database resource id for lease-prefix construction.
            this.databaseResourceId = databaseResourceResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        // The mapped id is not used below; the flatMap only sequences the container read
        // after the database read.
        .flatMap( id -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(documentCollectionResourceResponse -> {
                this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
                return this;
            }));
}
/**
 * Returns the configured {@link LeaseStoreManager}, building and caching a default one
 * over the lease container on first use. The lease container must be partitioned on a
 * single "/id" partition key; anything else is rejected.
 *
 * @return a Mono emitting the (possibly newly built) lease store manager.
 */
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                // A usable lease container must declare at least one partition key path.
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                // Exactly one path, and it must be "/id".
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so subsequent calls return the same manager.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the prefix used to scope lease documents to this processor instance:
 * {@code <optionsPrefix><feedHost>_<databaseResourceId>_<collectionResourceId>}.
 */
private String getLeasePrefix() {
    String prefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (prefix == null) {
        prefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return prefix
        + serviceEndpoint.getHost()
        + "_" + this.databaseResourceId
        + "_" + this.collectionResourceId;
}
/**
 * Assembles the partition-management pipeline (synchronizer, bootstrapper, supervisor
 * factory, controller and load balancer) around the given lease store manager.
 *
 * @param leaseStoreManager manager used both as lease container and lease manager.
 * @return a Mono emitting the fully wired {@link PartitionManager}.
 */
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    // Wrap the user-supplied observer factory so checkpoints are taken per CheckpointFrequency.
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // Prefer a caller-supplied processor factory; fall back to the default implementation.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    if (this.loadBalancingStrategy == null) {
        // Default: spread partitions evenly across participating hosts.
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    // NOTE(review): the health-monitoring decorator is only handed to the load balancer;
    // the PartitionManager below receives the undecorated controller — confirm intended.
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
/**
 * Releases this processor. Unlike {@link #stop()}, close() must be safe to call in any
 * state, so it only stops a processor that actually started (stop() throws
 * IllegalStateException otherwise).
 */
@Override
public void close() {
    if (this.isStarted()) {
        // Fire-and-forget shutdown on an elastic scheduler.
        this.stop().subscribeOn(Schedulers.elastic()).subscribe();
    }
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    if (this.partitionManager == null) {
        // First start: resolve collection metadata, obtain (or build) the lease store
        // manager, assemble the partition manager, then start it.
        return this.initializeCollectionPropertiesForBuild()
            .flatMap( value -> this.getLeaseStoreManager()
                .flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
            .flatMap(partitionManager1 -> {
                this.partitionManager = partitionManager1;
                return this.partitionManager.start();
            });
    } else {
        // Restart case: the pipeline is already wired, just start it again.
        return partitionManager.start();
    }
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> stop() {
    PartitionManager manager = this.partitionManager;
    if (manager == null || !manager.isRunning()) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return manager.stop();
}

/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    PartitionManager manager = this.partitionManager;
    return manager != null && manager.isRunning();
}
/**
 * Returns the current owner (host) and an approximation of the difference between the last processed item (defined
 * by the state of the feed container) and the latest change in the container for each partition (lease
 * document).
 * <p>
 * An empty map will be returned if the processor was not started or no lease documents matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
 * lag, asynchronously.
 */
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
    Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        // Processor not started: nothing to estimate.
        return Mono.just(earlyResult);
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // NOTE(review): this lambda body is truncated in this copy of the file —
            // "ChangeFeedOptions options = );" is not valid Java. Restore the per-lease
            // change-feed query from the complete implementation before compiling.
            ChangeFeedOptions options = );
        })
        .collectList()
        .map(valueList -> {
            // Fold the per-lease (key, lag) pairs into a single map.
            Map<String, Integer> result = new ConcurrentHashMap<>();
            for (Pair<String, Integer> pair : valueList) {
                result.put(pair.getKey(), pair.getValue());
            }
            return result;
        });
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
    List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        // Processor not started: no leases to report on.
        return Mono.just(Collections.unmodifiableList(earlyResult));
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // Read at most one pending change per lease, resuming from the lease's
            // continuation token, to probe the current position of the partition.
            ChangeFeedOptions options = new ChangeFeedOptions()
                .setMaxItemCount(1)
                .setPartitionKeyRangeId(lease.getLeaseToken())
                .setStartFromBeginning(true)
                .setRequestContinuation(lease.getContinuationToken());
            return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
                .take(1)
                .map(feedResponse -> {
                    // Extract the latest LSN from the session token. Assumes the token is
                    // "<pkRangeId>:<segment>[<sep><segment>]" — the separator constant is
                    // garbled in this copy of the file, so confirm against the original.
                    String sessionTokenLsn = feedResponse.getSessionToken();
                    String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
                    String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
                    String latestLsn = segments[0];
                    if (segments.length >= 2) {
                        // The second segment, when present, carries the latest LSN.
                        latestLsn = segments[1];
                    }
                    ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                        .setHostName(lease.getOwner())
                        .setLeaseToken(lease.getLeaseToken());
                    if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                        // Nothing pending: zero lag; latest LSN becomes the continuation token.
                        changeFeedProcessorState.setEstimatedLag(0)
                            .setContinuationToken(latestLsn);
                        return changeFeedProcessorState;
                    }
                    changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
                    Integer currentLsn = 0;
                    Integer estimatedLag = 0;
                    try {
                        currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
                        estimatedLag = Integer.valueOf(latestLsn);
                        // +1: the fetched (still unprocessed) document itself counts toward lag.
                        estimatedLag = estimatedLag - currentLsn + 1;
                        changeFeedProcessorState.setEstimatedLag(estimatedLag);
                    } catch (NumberFormatException ex) {
                        // Non-numeric LSN: report -1 so callers can distinguish "unknown" from "no lag".
                        logger.warn("Unexpected Cosmos LSN found", ex);
                        changeFeedProcessorState.setEstimatedLag(-1);
                    }
                    return changeFeedProcessorState;
                });
        })
        .collectList()
        .map(Collections::unmodifiableList);
}
/**
 * Sets the host name identifying this processor instance.
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
    // No validation here; build() rejects a null host name.
    this.hostName = hostName;
    return this;
}
/**
 * Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code feedDocumentClient} is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient == null) {
        // The previous message named the wrong identifier ("feedContextClient").
        throw new IllegalArgumentException("feedDocumentClient cannot be null");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}

/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the change feed processor options to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code changeFeedProcessorOptions} is null.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (changeFeedProcessorOptions == null) {
        throw new IllegalArgumentException("changeFeedProcessorOptions cannot be null");
    }
    this.changeFeedProcessorOptions = changeFeedProcessorOptions;
    return this;
}

/**
 * Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code observerFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (observerFactory == null) {
        throw new IllegalArgumentException("observerFactory cannot be null");
    }
    this.observerFactory = observerFactory;
    return this;
}

/**
 * Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code type} is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (type == null) {
        throw new IllegalArgumentException("type cannot be null");
    }
    this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
    return this;
}
/**
 * Convenience overload: registers a consumer of change batches by wrapping it in the
 * default observer factory.
 *
 * @param consumer callback invoked with each batch of changed documents.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    DefaultObserverFactory factory = new DefaultObserverFactory(consumer);
    return this.observerFactory(factory);
}

/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    this.databaseResourceId = databaseResourceId;
    return this;
}

/**
 * Sets the collection resource ID of the monitored collection.
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseClient} is null or does not have
 *         content-response-on-write enabled.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient cannot be null");
    }
    // Lease bookkeeping relies on reading back written lease documents.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Fixed warning grammar ("setting are less then expected" -> "is less than expected").
        logger.warn("leaseClient consistency level setting is less than expected, which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy == null) {
        // Descriptive message instead of echoing only the bare parameter name.
        throw new IllegalArgumentException("loadBalancingStrategy cannot be null");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}

/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory == null) {
        throw new IllegalArgumentException("partitionProcessorFactory cannot be null");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}

/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager == null) {
        throw new IllegalArgumentException("leaseStoreManager cannot be null");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}

/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor == null) {
        throw new IllegalArgumentException("healthMonitor cannot be null");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Validates the configuration and returns this instance as the {@link ChangeFeedProcessor}.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if the host name or the observer factory was not set.
 */
public ChangeFeedProcessor build() {
    if (null == this.hostName) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (null == this.observerFactory) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    ChangeFeedProcessorOptions options = this.changeFeedProcessorOptions;
    if (options != null
            && options.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        // A lease-acquire interval below the default increases lease-store traffic.
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (this.scheduler == null) {
        // Default scheduler for potentially blocking/long-running work.
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
// Default constructor: re-applies the default partition-query batch size and degree of
// parallelism (the same values the field initializers use).
public ChangeFeedProcessorBuilderImpl() {
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25;
}
// Constructor that injects a pre-built PartitionManager; start()/stop() then operate on
// it directly instead of assembling one. NOTE(review): presumably a test hook — confirm.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
/**
 * Lazily creates default {@link ChangeFeedProcessorOptions} if none were supplied, then
 * resolves and caches the database and collection resource IDs of the monitored (feed)
 * container on this builder.
 *
 * @return a Mono emitting this builder once both resource IDs have been populated.
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map( databaseResourceResponse -> {
            // Cache the database resource id for lease-prefix construction.
            this.databaseResourceId = databaseResourceResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        // The mapped id is not used below; the flatMap only sequences the container read
        // after the database read.
        .flatMap( id -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(documentCollectionResourceResponse -> {
                this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
                return this;
            }));
}
/**
 * Returns the configured {@link LeaseStoreManager}, building and caching a default one
 * over the lease container on first use. The lease container is required to be
 * partitioned on a single "/id" partition key; anything else is rejected.
 *
 * @return a Mono emitting the (possibly newly built) lease store manager.
 */
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                // A usable lease container must declare at least one partition key path.
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                // Exactly one path, and it must be "/id".
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so subsequent calls return the same manager.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Builds the prefix used to scope lease documents to this processor instance:
 * {@code <optionsPrefix><feedHost>_<databaseResourceId>_<collectionResourceId>}.
 */
private String getLeasePrefix() {
    String prefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (prefix == null) {
        prefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return prefix
        + serviceEndpoint.getHost()
        + "_" + this.databaseResourceId
        + "_" + this.collectionResourceId;
}
/**
 * Assembles the partition-management pipeline (synchronizer, bootstrapper, supervisor
 * factory, controller and load balancer) around the given lease store manager.
 *
 * @param leaseStoreManager manager used both as lease container and lease manager.
 * @return a Mono emitting the fully wired {@link PartitionManager}.
 */
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    // Wrap the user-supplied observer factory so checkpoints are taken per CheckpointFrequency.
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // Prefer a caller-supplied processor factory; fall back to the default implementation.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    if (this.loadBalancingStrategy == null) {
        // Default: spread partitions evenly across participating hosts.
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    // NOTE(review): the health-monitoring decorator is only handed to the load balancer;
    // the PartitionManager below receives the undecorated controller — confirm intended.
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
/**
 * Releases this processor. Unlike {@link #stop()}, close() must be safe to call in any
 * state, so it only stops a processor that actually started (stop() throws
 * IllegalStateException otherwise).
 */
@Override
public void close() {
    if (this.isStarted()) {
        // Fire-and-forget shutdown on an elastic scheduler.
        this.stop().subscribeOn(Schedulers.elastic()).subscribe();
    }
}
} |
String.split uses regex underneath which is cpu intensive. use StringUtils.split instead. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = parsedSessionToken.split(SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(PROPERTY_NAME_TS).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | String[] segments = parsedSessionToken.split(SEGMENT_SEPARATOR); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    if (this.partitionManager == null) {
        // First start: resolve collection metadata, obtain (or build) the lease store
        // manager, assemble the partition manager, then start it.
        return this.initializeCollectionPropertiesForBuild()
            .flatMap( value -> this.getLeaseStoreManager()
                .flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
            .flatMap(partitionManager1 -> {
                this.partitionManager = partitionManager1;
                return this.partitionManager.start();
            });
    } else {
        // Restart case: the pipeline is already wired, just start it again.
        return partitionManager.start();
    }
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> stop() {
    PartitionManager manager = this.partitionManager;
    if (manager == null || !manager.isRunning()) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return manager.stop();
}

/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    PartitionManager manager = this.partitionManager;
    return manager != null && manager.isRunning();
}
/**
 * Returns the current owner (host) and an approximation of the difference between the last processed item (defined
 * by the state of the feed container) and the latest change in the container for each partition (lease
 * document).
 * <p>
 * An empty map will be returned if the processor was not started or no lease documents matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
 * lag, asynchronously.
 */
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
    Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        // Processor not started: nothing to estimate.
        return Mono.just(earlyResult);
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // NOTE(review): this lambda body is truncated in this copy of the file —
            // "ChangeFeedOptions options = );" is not valid Java. Restore the per-lease
            // change-feed query from the complete implementation before compiling.
            ChangeFeedOptions options = );
        })
        .collectList()
        .map(valueList -> {
            // Fold the per-lease (key, lag) pairs into a single map.
            Map<String, Integer> result = new ConcurrentHashMap<>();
            for (Pair<String, Integer> pair : valueList) {
                result.put(pair.getKey(), pair.getValue());
            }
            return result;
        });
}
/**
 * Returns a list of states each representing one scoped worker item.
 * <p>
 * An empty list will be returned if the processor was not started or no lease items matching the current
 * {@link ChangeFeedProcessor} instance's lease prefix could be found.
 *
 * @return a list of states each representing one scoped worker item.
 */
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
    List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
    if (this.leaseStoreManager == null || this.feedContextClient == null) {
        return Mono.just(Collections.unmodifiableList(earlyResult));
    }
    return this.leaseStoreManager.getAllLeases()
        .flatMap(lease -> {
            // Read at most one change per partition, resuming from the lease's checkpoint.
            ChangeFeedOptions options = new ChangeFeedOptions()
                .setMaxItemCount(1)
                .setPartitionKeyRangeId(lease.getLeaseToken())
                .setStartFromBeginning(true)
                .setRequestContinuation(lease.getContinuationToken());
            return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
                .take(1)
                .map(feedResponse -> {
                    // The session token embeds the latest LSN after the pkRangeId separator.
                    String sessionTokenLsn = feedResponse.getSessionToken();
                    String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
                    String[] segments = parsedSessionToken.split(SEGMENT_SEPARATOR);
                    String latestLsn = segments[0];
                    if (segments.length >= 2) {
                        // "version#globalLsn" style token — take the global LSN part.
                        latestLsn = segments[1];
                    }
                    ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
                        .setId(lease.getId())
                        .setHostName(lease.getOwner())
                        .setLeaseToken(lease.getLeaseToken())
                        .setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
                    if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
                        // Nothing left to process on this partition.
                        changeFeedProcessorState.setEstimatedLag(0)
                            .setContinuationToken(latestLsn);
                        return changeFeedProcessorState;
                    }
                    changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
                    try {
                        // Cosmos "_ts" is seconds since the Unix epoch; the previous
                        // Instant.ofEpochMilli produced timestamps off by a factor of 1000.
                        changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochSecond(Long.parseLong(
                            feedResponse.getResults().get(0).get(PROPERTY_NAME_TS).asText("0"))));
                    } catch (NumberFormatException ex) {
                        logger.warn("Unexpected Cosmos _ts found", ex);
                        changeFeedProcessorState.setContinuationTokenTimestamp(null);
                    }
                    try {
                        // Lag ~= latest LSN in the partition minus the last processed LSN.
                        int currentLsn = Integer.parseInt(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
                        int estimatedLag = Integer.parseInt(latestLsn) - currentLsn + 1;
                        changeFeedProcessorState.setEstimatedLag(estimatedLag);
                    } catch (NumberFormatException ex) {
                        logger.warn("Unexpected Cosmos LSN found", ex);
                        changeFeedProcessorState.setEstimatedLag(-1); // sentinel: lag unknown
                    }
                    return changeFeedProcessorState;
                });
        })
        .collectList()
        .map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 * <p>
 * NOTE(review): the value is not validated here; a null host name is only rejected later by build().
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code feedDocumentClient} is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient == null) {
        // Name the actual parameter; the previous message referenced the internal field instead.
        throw new IllegalArgumentException("feedDocumentClient");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used by this processor.
 *
 * @param changeFeedProcessorOptions the change feed processor options to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (changeFeedProcessorOptions != null) {
        this.changeFeedProcessorOptions = changeFeedProcessorOptions;
        return this;
    }
    throw new IllegalArgumentException("changeFeedProcessorOptions");
}
/**
 * Sets the {@link ChangeFeedObserverFactory} used to generate {@link ChangeFeedObserver} instances.
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (observerFactory != null) {
        this.observerFactory = observerFactory;
        return this;
    }
    throw new IllegalArgumentException("observerFactory");
}
/**
 * Registers a {@link ChangeFeedObserver} type; a {@link ChangeFeedObserverFactoryImpl} will
 * instantiate it to process changes.
 *
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (type != null) {
        this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
        return this;
    }
    throw new IllegalArgumentException("type");
}
/**
 * Convenience overload of {@link #observerFactory(ChangeFeedObserverFactory)} that wraps the given
 * consumer in a {@link DefaultObserverFactory}; the consumer receives each batch of changed documents.
 *
 * @param consumer callback invoked with every batch of changed documents.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 * <p>
 * NOTE(review): initializeCollectionPropertiesForBuild() overwrites this value with the id read from
 * the service, so a value set here is only effective until that method runs.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 * <p>
 * NOTE(review): initializeCollectionPropertiesForBuild() overwrites this value with the id read from
 * the service, so a value set here is only effective until that method runs.
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseClient} is null or has content response on write disabled.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease bookkeeping requires the written document back in the response, so this setting is mandatory.
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Weaker-than-SESSION consistency is allowed, but flagged. (Previous message had
        // grammar errors: "setting are less then expected".)
        logger.warn("leaseClient consistency level setting is lower than the expected SESSION level");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy != null) {
        this.loadBalancingStrategy = loadBalancingStrategy;
        return this;
    }
    throw new IllegalArgumentException("loadBalancingStrategy");
}
/**
 * Sets the {@link PartitionProcessorFactory} used to create a {@link PartitionProcessor} per partition.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory != null) {
        this.partitionProcessorFactory = partitionProcessorFactory;
        return this;
    }
    throw new IllegalArgumentException("partitionProcessorFactory");
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager != null) {
        this.leaseStoreManager = leaseStoreManager;
        return this;
    }
    throw new IllegalArgumentException("leaseStoreManager");
}
/**
 * Sets the {@link HealthMonitor} used to watch for unhealthy partition controllers.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor != null) {
        this.healthMonitor = healthMonitor;
        return this;
    }
    throw new IllegalArgumentException("healthMonitor");
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if a required setting (host name, observer, feed container,
 *         or lease container / lease store manager) is missing.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Fail fast here instead of with an NPE inside start()'s reactive chain.
    if (this.feedContextClient == null) {
        throw new IllegalArgumentException("Feed container was not specified");
    }
    // A lease container is only required when no pre-built lease store manager was supplied.
    if (this.leaseContextClient == null && this.leaseStoreManager == null) {
        throw new IllegalArgumentException("Lease container or lease store manager was not specified");
    }
    if (this.scheduler == null) {
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
/**
 * Creates a builder with the default partition-query batch size and degree of parallelism.
 */
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
/**
 * Creates a builder that wraps an already-built {@link PartitionManager}; start()/stop() will
 * delegate to it directly instead of building a new one.
 *
 * @param partitionManager the pre-built partition manager to drive this processor.
 */
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
/**
 * Resolves and caches the database and container identifiers used when building the lease prefix.
 * Side effects: populates databaseResourceId and collectionResourceId, and creates default
 * ChangeFeedProcessorOptions when none were configured.
 * <p>
 * NOTE(review): this stores getProperties().getId() (the user-facing id), not the "_rid" resource id,
 * despite the field names — confirm that is intended for lease-prefix scoping.
 *
 * @return a Mono emitting this builder once both identifiers are cached.
 */
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
/**
 * Returns the configured {@link LeaseStoreManager}, or lazily builds one over the lease container.
 * Only a lease collection partitioned on exactly "/id" is supported; anything else is rejected.
 * Side effect: caches the built manager in this.leaseStoreManager so later calls skip the round trip.
 *
 * @return a Mono emitting the lease store manager.
 */
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
// Validate the lease collection's partition key definition: must be exactly ["/id"].
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache the built manager for subsequent calls.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/**
 * Computes the prefix that scopes lease documents to this feed container:
 * "{optionsPrefix}{serviceHost}_{databaseResourceId}_{collectionResourceId}".
 *
 * @return the lease prefix for this processor instance.
 */
private String getLeasePrefix() {
    String configuredPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    String optionsPrefix = configuredPrefix == null ? "" : configuredPrefix;
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return optionsPrefix + serviceEndpoint.getHost()
        + "_" + this.databaseResourceId
        + "_" + this.collectionResourceId;
}
/**
 * Wires together the full processing pipeline: partition synchronizer, bootstrapper, supervisor
 * factory, partition controller (optionally health-monitored), load balancer, and finally the
 * {@link PartitionManager} that start()/stop() delegate to.
 * Side effects: fills in default loadBalancingStrategy and healthMonitor when unset.
 *
 * @param leaseStoreManager the lease store manager serving as lease container, checkpointer and lease manager.
 * @return a Mono emitting the assembled partition manager.
 */
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// A caller-supplied processor factory wins over the default implementation.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the health-monitored decorator (partitionController2) is only wired into the load
// balancer; PartitionManagerImpl receives the undecorated controller — confirm this is intended.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
/**
 * Releases this processor by stopping it asynchronously (fire-and-forget).
 * Safe to call on a processor that was never started.
 */
@Override
public void close() {
    // stop() throws IllegalStateException when the processor never fully started; close()
    // must remain safe to call from try-with-resources, so guard first.
    if (this.isStarted()) {
        this.stop().subscribeOn(Schedulers.elastic()).subscribe();
    }
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
/**
 * Shuts the processor down by scheduling an asynchronous stop on an elastic scheduler.
 * NOTE(review): stop() throws IllegalStateException synchronously when the processor
 * never started — confirm close() on an unstarted instance is intended to throw.
 */
@Override
public void close() {
    Mono<Void> shutdown = this.stop();
    shutdown.subscribeOn(Schedulers.elastic()).subscribe();
}
} |
also java compiler may inline these. so shouldn't be a perf hit. | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) { | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
 * Start listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 */
@Override
public Mono<Void> start() {
    // Already built (e.g. a restart, or a pre-supplied manager): just start it.
    if (this.partitionManager != null) {
        return partitionManager.start();
    }
    // First start: resolve collection metadata, then the lease store manager,
    // then wire up and cache the partition manager before starting it.
    return this.initializeCollectionPropertiesForBuild()
        .flatMap(ignored -> this.getLeaseStoreManager().flatMap(this::buildPartitionManager))
        .flatMap(builtManager -> {
            this.partitionManager = builtManager;
            return this.partitionManager.start();
        });
}
/**
 * Stops listening for changes asynchronously.
 *
 * @return a representation of the deferred computation of this call.
 * @throws IllegalStateException when the processor has not fully started.
 */
@Override
public Mono<Void> stop() {
    boolean running = this.partitionManager != null && this.partitionManager.isRunning();
    if (!running) {
        throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
    }
    return this.partitionManager.stop();
}
/**
 * Returns the state of the change feed processor.
 *
 * @return true if the change feed processor is currently active and running.
 */
@Override
public boolean isStarted() {
    // Snapshot the volatile field once so the null check and the call agree.
    PartitionManager manager = this.partitionManager;
    return manager != null && manager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
 * Sets the host name.
 *
 * @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
    // No validation here; a null host name is rejected later by build().
    this.hostName = hostName;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code feedDocumentClient} is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient != null) {
        this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
        return this;
    }
    throw new IllegalArgumentException("feedContextClient");
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} to be used.
 *
 * @param changeFeedProcessorOptions the change feed processor options to use.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code changeFeedProcessorOptions} is null.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
    if (changeFeedProcessorOptions != null) {
        this.changeFeedProcessorOptions = changeFeedProcessorOptions;
        return this;
    }
    throw new IllegalArgumentException("changeFeedProcessorOptions");
}
/**
 * Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}.
 *
 * @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code observerFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
    if (observerFactory != null) {
        this.observerFactory = observerFactory;
        return this;
    }
    throw new IllegalArgumentException("observerFactory");
}
/**
 * Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
 *
 * @param type the type of {@link ChangeFeedObserver} to be used.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code type} is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
    if (type != null) {
        this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
        return this;
    }
    throw new IllegalArgumentException("type");
}
// Convenience hook: wraps the consumer in a DefaultObserverFactory so each batch of
// change documents is delivered straight to the consumer.
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
    return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param databaseResourceId the database resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
    // NOTE: start() re-resolves this field via initializeCollectionPropertiesForBuild(),
    // overwriting any value set here.
    this.databaseResourceId = databaseResourceId;
    return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 *
 * @param collectionResourceId the collection resource ID of the monitored collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
    // NOTE: start() re-resolves this field via initializeCollectionPropertiesForBuild(),
    // overwriting any value set here.
    this.collectionResourceId = collectionResourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code leaseClient} is null or does not return
 *     content responses on write.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    // Lease writes must echo the document body back, otherwise lease bookkeeping breaks.
    boolean contentResponseEnabled = getContextClient(leaseClient).isContentResponseOnWriteEnabled();
    if (!contentResponseEnabled) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    // Weak consistency on the lease store risks stale lease reads; warn but allow.
    ConsistencyLevel level = getContextClient(leaseClient).getConsistencyLevel();
    boolean weakerThanSession = level == ConsistencyLevel.CONSISTENT_PREFIX || level == ConsistencyLevel.EVENTUAL;
    if (weakerThanSession) {
        logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy != null) {
        this.loadBalancingStrategy = loadBalancingStrategy;
        return this;
    }
    throw new IllegalArgumentException("loadBalancingStrategy");
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory != null) {
        this.partitionProcessorFactory = partitionProcessorFactory;
        return this;
    }
    throw new IllegalArgumentException("partitionProcessorFactory");
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager != null) {
        this.leaseStoreManager = leaseStoreManager;
        return this;
    }
    throw new IllegalArgumentException("leaseStoreManager");
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 * @throws IllegalArgumentException when {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor != null) {
        this.healthMonitor = healthMonitor;
        return this;
    }
    throw new IllegalArgumentException("healthMonitor");
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException when the host name or the observer has not been configured.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Consistency with the sibling implementation in this file: surface suspiciously
    // aggressive lease-acquire intervals instead of accepting them silently.
    if (this.changeFeedProcessorOptions != null
        && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    // Default to an elastic scheduler when the caller supplied none.
    if (this.scheduler == null) {
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
// Default constructor; the assignments restate the field initializers' default values.
public ChangeFeedProcessorBuilderImpl() {
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25;
}
// Constructs the processor around a pre-built partition manager; start() then reuses
// it instead of building the pipeline (see start()).
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
// Resolves the database and container ids needed for the lease prefix, defaulting
// the processor options when the caller supplied none.
// Side effects: sets this.changeFeedProcessorOptions, this.databaseResourceId and
// this.collectionResourceId.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map( databaseResourceResponse -> {
            // NOTE(review): stores the database *id* (name) from the response properties,
            // not the backend _rid — confirm the lease prefix is meant to be name-based.
            this.databaseResourceId = databaseResourceResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        .flatMap( id -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(documentCollectionResourceResponse -> {
                this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
                return this;
            }));
}
// Lazily builds (and caches in this.leaseStoreManager) a LeaseStoreManager over the
// lease container, after validating that the container is partitioned by "/id".
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                // The lease container must be partitioned, and specifically by "/id" alone.
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    // Cache the built manager so subsequent calls return it directly.
                    .map(manager -> {
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Computes the lease-document prefix: optional user-configured prefix, then the feed
 * endpoint host, the database id and the collection id, underscore-separated.
 */
private String getLeasePrefix() {
    String userPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return String.format(
        "%s%s_%s_%s",
        userPrefix != null ? userPrefix : "",
        serviceEndpoint.getHost(),
        this.databaseResourceId,
        this.collectionResourceId);
}
// Assembles the full partition-management pipeline around the given lease store
// manager: observer checkpointing, partition synchronization, lease bootstrapping,
// supervision, health-monitored control and load balancing.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    // Wrap the user's observer factory so checkpoints are written automatically.
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // A caller-supplied processor factory wins; otherwise build the default one.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    // Default strategy: spread partitions equally across hosts.
    if (this.loadBalancingStrategy == null) {
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    // The load balancer drives the health-monitored decorator; note the manager below
    // is built with the undecorated controller.
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
@Override
public void close() {
    // Fire-and-forget shutdown on an elastic scheduler.
    // NOTE(review): stop() throws IllegalStateException synchronously when the
    // processor never started, so close() would propagate it — confirm intended.
    this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 *
 * @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code loadBalancingStrategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
    if (loadBalancingStrategy == null) {
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    this.loadBalancingStrategy = loadBalancingStrategy;
    return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
 *
 * @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code partitionProcessorFactory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
    if (partitionProcessorFactory == null) {
        throw new IllegalArgumentException("partitionProcessorFactory");
    }
    this.partitionProcessorFactory = partitionProcessorFactory;
    return this;
}
/**
 * Sets the {@link LeaseStoreManager} to be used to manage leases.
 * When not supplied, a default manager is built lazily by {@code getLeaseStoreManager()}.
 *
 * @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseStoreManager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
    if (leaseStoreManager == null) {
        throw new IllegalArgumentException("leaseStoreManager");
    }
    this.leaseStoreManager = leaseStoreManager;
    return this;
}
/**
 * Sets the {@link HealthMonitor} to be used to monitor unhealthiness situation.
 * When not supplied, a {@code TraceHealthMonitor} is used (see {@code buildPartitionManager}).
 *
 * @param healthMonitor The instance of {@link HealthMonitor} to use.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code healthMonitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
    if (healthMonitor == null) {
        throw new IllegalArgumentException("healthMonitor");
    }
    this.healthMonitor = healthMonitor;
    return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
 *
 * <p>Note: this builder implements {@link ChangeFeedProcessor} itself — after validating the
 * mandatory settings it returns {@code this}.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if the host name or the change feed observer was not set.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Warn when leaseAcquireInterval is configured below the library default; still allowed.
    if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    // Default to the shared elastic scheduler when the caller did not provide one.
    if (this.scheduler == null) {
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
// Default constructor: seeds the partition-query tuning knobs.
public ChangeFeedProcessorBuilderImpl() {
    this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
    this.degreeOfParallelism = 25;
}
// NOTE(review): presumably a test/advanced hook that injects a pre-built PartitionManager,
// bypassing buildPartitionManager() — confirm against call sites.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
    this.partitionManager = partitionManager;
}
// Resolves and caches the database and collection resource ids of the monitored (feed)
// collection; both feed into the lease prefix (see getLeasePrefix()). Also lazily
// creates default options when none were supplied.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
    if (this.changeFeedProcessorOptions == null) {
        this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
    }
    return this.feedContextClient
        .readDatabase(this.feedContextClient.getDatabaseClient(), null)
        .map( databaseResourceResponse -> {
            this.databaseResourceId = databaseResourceResponse.getProperties().getId();
            return this.databaseResourceId;
        })
        .flatMap( id -> this.feedContextClient
            .readContainer(this.feedContextClient.getContainerClient(), null)
            .map(documentCollectionResourceResponse -> {
                this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
                return this;
            }));
}
// Returns the user-supplied LeaseStoreManager, or lazily builds (and caches) the default
// one backed by the lease container. The default requires the lease collection to be
// partitioned on exactly "/id".
private Mono<LeaseStoreManager> getLeaseStoreManager() {
    if (this.leaseStoreManager == null) {
        return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
            .flatMap( collectionSettings -> {
                boolean isPartitioned =
                    collectionSettings.getPartitionKeyDefinition() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
                    collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
                // Reject anything but a single-path "/id" partition key.
                if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
                    return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
                }
                RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
                String leasePrefix = this.getLeasePrefix();
                return LeaseStoreManager.builder()
                    .leasePrefix(leasePrefix)
                    .leaseCollectionLink(this.leaseContextClient.getContainerClient())
                    .leaseContextClient(this.leaseContextClient)
                    .requestOptionsFactory(requestOptionsFactory)
                    .hostName(this.hostName)
                    .build()
                    .map(manager -> {
                        // Cache so subsequent calls reuse the same manager instance.
                        this.leaseStoreManager = manager;
                        return this.leaseStoreManager;
                    });
            });
    }
    return Mono.just(this.leaseStoreManager);
}
/**
 * Composes the lease-document prefix:
 * {@code <user prefix><service endpoint host>_<database rid>_<collection rid>}.
 * A missing user prefix contributes the empty string.
 */
private String getLeasePrefix() {
    String userPrefix = this.changeFeedProcessorOptions.getLeasePrefix() != null
        ? this.changeFeedProcessorOptions.getLeasePrefix()
        : "";
    String endpointHost = this.feedContextClient.getServiceEndpoint().getHost();
    return userPrefix + endpointHost + "_" + this.databaseResourceId + "_" + this.collectionResourceId;
}
// Wires together the partition-processing machinery: synchronizer, bootstrapper,
// supervisor factory, controller (+ health-monitoring decorator), load balancer,
// and finally the PartitionManager that drives them.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
    CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
    PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
        this.feedContextClient,
        this.feedContextClient.getContainerClient(),
        leaseStoreManager,
        leaseStoreManager,
        this.degreeOfParallelism,
        this.queryPartitionsMaxBatchSize
    );
    Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
    PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
        factory,
        leaseStoreManager,
        // Fall back to the default processor factory when none was configured.
        this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
            this.feedContextClient,
            this.changeFeedProcessorOptions,
            leaseStoreManager,
            this.feedContextClient.getContainerClient()),
        this.changeFeedProcessorOptions,
        this.scheduler
    );
    if (this.loadBalancingStrategy == null) {
        this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
            this.hostName,
            this.changeFeedProcessorOptions.getMinScaleCount(),
            this.changeFeedProcessorOptions.getMaxScaleCount(),
            this.changeFeedProcessorOptions.getLeaseExpirationInterval());
    }
    PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
    if (this.healthMonitor == null) {
        this.healthMonitor = new TraceHealthMonitor();
    }
    PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
    PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
        partitionController2,
        leaseStoreManager,
        this.loadBalancingStrategy,
        this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
        this.scheduler
    );
    // NOTE(review): the health-monitored decorator (partitionController2) is handed only to the
    // load balancer, while the PartitionManager below gets the undecorated controller —
    // confirm this asymmetry is intended and not a missed rename.
    PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
    return Mono.just(partitionManager);
}
@Override
public void close() {
    // Fire-and-forget shutdown: delegate to stop() on the elastic scheduler
    // without waiting for completion.
    this.stop()
        .subscribeOn(Schedulers.elastic())
        .subscribe();
}
} |
// Verifies ChangeFeedProcessor.getCurrentState(): estimated total lag is 0 before any
// documents are written and equals FEED_COUNT after FEED_COUNT documents are inserted.
public void getCurrentState() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .buildChangeFeedProcessor();
        try {
            // Start, run briefly, then stop so lease documents exist before state is queried.
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessor)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessor.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start and stopped in the expected time", ex);
            throw ex;
        }
        Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        List<ChangeFeedProcessorState> cfpCurrentState = changeFeedProcessor.getCurrentState()
            .map(state -> {
                try {
                    log.info(OBJECT_MAPPER.writeValueAsString(state));
                } catch (JsonProcessingException ex) {
                    log.error("Unexpected", ex);
                }
                return state;
            }).block();
        int totalLag = 0;
        for (ChangeFeedProcessorState item : cfpCurrentState) {
            totalLag += item.getEstimatedLag();
        }
        // Assert on the int itself (not a pre-computed boolean) so a failure reports
        // actual vs expected instead of "expected true".
        assertThat(totalLag).as("Change Feed Processor estimated total lag at start").isZero();
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        cfpCurrentState = changeFeedProcessor.getCurrentState()
            .map(state -> {
                try {
                    log.info(OBJECT_MAPPER.writeValueAsString(state));
                } catch (JsonProcessingException ex) {
                    log.error("Unexpected", ex);
                }
                return state;
            }).block();
        totalLag = 0;
        for (ChangeFeedProcessorState item : cfpCurrentState) {
            totalLag += item.getEstimatedLag();
        }
        assertThat(totalLag).as("Change Feed Processor estimated total lag").isEqualTo(FEED_COUNT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
} | assertThat(totalLag == FEED_COUNT).as("Change Feed Processor estimated total lag").isTrue(); | public void getCurrentState() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<InternalObjectNode> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.buildChangeFeedProcessor();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessor)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessor.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start and stopped in the expected time", ex);
throw ex;
}
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
List<ChangeFeedProcessorState> cfpCurrentState = changeFeedProcessor.getCurrentState()
.map(state -> {
try {
log.info(OBJECT_MAPPER.writeValueAsString(state));
} catch (JsonProcessingException ex) {
log.error("Unexpected", ex);
}
return state;
}).block();
int totalLag = 0;
for (ChangeFeedProcessorState item : cfpCurrentState) {
totalLag += item.getEstimatedLag();
}
assertThat(totalLag).isEqualTo(0).as("Change Feed Processor estimated total lag at start");
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
cfpCurrentState = changeFeedProcessor.getCurrentState()
.map(state -> {
try {
log.info(OBJECT_MAPPER.writeValueAsString(state));
} catch (JsonProcessingException ex) {
log.error("Unexpected", ex);
}
return state;
}).block();
totalLag = 0;
for (ChangeFeedProcessorState item : cfpCurrentState) {
totalLag += item.getEstimatedLag();
}
assertThat(totalLag).isEqualTo(FEED_COUNT).as("Change Feed Processor estimated total lag");
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
// Random host name per test-class instance (presumably to avoid lease contention
// between concurrent runs — confirm).
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Number of documents inserted per feed batch.
private final int FEED_COUNT = 10;
// Base wait unit (ms) for start/stop timeouts and polling budgets.
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
// Low initial RU/s; readFeedDocumentsAfterSplit scales this up to force a partition split.
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// TestNG factory: instantiates the suite once per configured client builder.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
// Verifies that with startFromBeginning=true the processor replays documents that were
// inserted BEFORE the processor was started.
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Documents go in first; the processor must pick them up from the beginning.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .buildChangeFeedProcessor();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Give the processor time to drain the feed before asserting.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// Verifies that a processor configured with a start time in the past (yesterday, UTC)
// receives documents inserted after it starts.
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(1))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                // Start reading from yesterday so everything inserted below is in range.
                .setStartTime(ZonedDateTime.now(ZoneOffset.UTC).minusDays(1).toInstant())
                .setMinScaleCount(1)
                .setMaxScaleCount(3)
            )
            .buildChangeFeedProcessor();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// Verifies getEstimatedLag(): total lag is 0 with an empty feed and FEED_COUNT after
// FEED_COUNT documents are inserted (the processor is stopped before the inserts).
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void getEstimatedLag() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .buildChangeFeedProcessor();
        try {
            // Start, run briefly, then stop so lease documents exist before lag is estimated.
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessor)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessor.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start and stopped in the expected time", ex);
            throw ex;
        }
        Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        Map<String, Integer> estimatedLagResult = changeFeedProcessor.getEstimatedLag()
            .map(getEstimatedLag -> {
                System.out.println(getEstimatedLag);
                return getEstimatedLag;
            }).block();
        int totalLag = 0;
        for (int lag : estimatedLagResult.values()) {
            totalLag += lag;
        }
        // Assert on the int itself (not a pre-computed boolean) so a failure reports
        // actual vs expected instead of "expected true".
        assertThat(totalLag).as("Change Feed Processor estimated total lag at start").isZero();
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        estimatedLagResult = changeFeedProcessor.getEstimatedLag()
            .map(getEstimatedLag -> {
                System.out.println(getEstimatedLag);
                return getEstimatedLag;
            }).block();
        totalLag = 0;
        for (int lag : estimatedLagResult.values()) {
            totalLag += lag;
        }
        assertThat(totalLag).as("Change Feed Processor estimated total lag").isEqualTo(FEED_COUNT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// Verifies lease takeover: the first host's leases are reassigned to a fake "TEMP_OWNER",
// then a second host must acquire the (now stale) leases and process the feed.
// Fix: the @Test annotation was duplicated, which does not compile (TestNG's @Test is
// not a repeatable annotation).
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessorFirst = new ChangeFeedProcessorBuilder()
            .hostName(ownerFirst)
            .handleChanges(docs -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix(leasePrefix)
            )
            .buildChangeFeedProcessor();
        ChangeFeedProcessor changeFeedProcessorSecond = new ChangeFeedProcessorBuilder()
            .hostName(ownerSecond)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(10))
                .setLeaseAcquireInterval(Duration.ofSeconds(5))
                .setLeaseExpirationInterval(Duration.ofSeconds(20))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix(leasePrefix)
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .buildChangeFeedProcessor();
        try {
            // Run the first host, stop it, then rewrite all its lease documents to a bogus
            // owner, insert documents, and start the second host, which must take over.
            changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessorFirst)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessorFirst.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .doOnSuccess(aVoid -> {
                    try {
                        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("Interrupted exception", e);
                    }
                    ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                    SqlParameter param = new SqlParameter();
                    param.setName("@PartitionLeasePrefix");
                    param.setValue(leasePrefix);
                    SqlQuerySpec querySpec = new SqlQuerySpec(
                        "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                    CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
                    createdLeaseCollection.queryItems(querySpec, cosmosQueryRequestOptions, InternalObjectNode.class).byPage()
                        .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                        .flatMap(doc -> {
                            // Forge a stale owner so the second host sees the lease as abandoned.
                            ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
                            leaseDocument.setOwner("TEMP_OWNER");
                            CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                            return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                                .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                        })
                        .map(ServiceItemLease::fromDocument)
                        .map(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                            return leaseDocument;
                        })
                        .last()
                        .flatMap(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("Start creating documents");
                            List<InternalObjectNode> docDefList = new ArrayList<>();
                            for (int i = 0; i < FEED_COUNT; i++) {
                                docDefList.add(getDocumentDefinition());
                            }
                            return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                                .last()
                                .delayElement(Duration.ofMillis(1000))
                                .flatMap(cosmosItemResponse -> {
                                    ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                    return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                                });
                        })
                        .subscribe();
                })
                .subscribe();
        } catch (Exception ex) {
            log.error("First change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Wait for ownership to transfer to the second processor.
        long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
        while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
            remainingWork -= 100;
            Thread.sleep(100);
        }
        waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
// Verifies the processor keeps delivering documents across a partition split, which is
// forced by raising the container throughput mid-test.
// Fix: the feed container was created via createLeaseCollection(...) (wrong name and
// "/id" partition key); use the feed-collection helper the variable name implies.
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsAfterSplit() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollectionForSplit = createFeedCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
        changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollectionForSplit)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix("TEST")
                .setStartFromBeginning(true)
                .setMaxItemCount(10)
            )
            .buildChangeFeedProcessor();
        // Start the processor, wait for the initial batch, then raise throughput to
        // trigger a partition split.
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .onErrorResume(throwable -> {
                log.error("Change feed processor did not start in the expected time", throwable);
                return Mono.error(throwable);
            })
            .doOnSuccess(aVoid -> {
                try {
                    waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
                } catch (InterruptedException e) {
                    throw new RuntimeException("Interrupted exception", e);
                }
            })
            .then(
                createdFeedCollectionForSplit
                    .readThroughput().subscribeOn(Schedulers.elastic())
                    .flatMap(currentThroughput ->
                        createdFeedCollectionForSplit
                            .replaceThroughput(ThroughputProperties.createManualThroughput(FEED_COLLECTION_THROUGHPUT))
                            .subscribeOn(Schedulers.elastic())
                    )
                    .then()
            )
            .subscribe();
        Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        // Poll the partition key ranges until at least two exist (the split completed).
        String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
        AsyncDocumentClient contextClient = getContextClient(createdDatabase);
        Flux.just(1).subscribeOn(Schedulers.elastic())
            .flatMap(value -> {
                log.warn("Reading current throughput change.");
                return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
            })
            .map(partitionKeyRangeFeedResponse -> {
                int count = partitionKeyRangeFeedResponse.getResults().size();
                if (count < 2) {
                    log.warn("Throughput change is pending.");
                    throw new RuntimeException("Throughput change is not done.");
                }
                return count;
            })
            .retry(40, throwable -> {
                try {
                    log.warn("Retrying...");
                    Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
                } catch (InterruptedException e) {
                    throw new RuntimeException("Interrupted exception", e);
                }
                return true;
            })
            .last().block();
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        // A second batch inserted after the split must also be delivered.
        createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollectionForSplit);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Returns a change handler that records every delivered document into the shared
 * {@code receivedDocuments} map, with entry/exit logging.
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    return docs -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        docs.forEach(item -> processItem(item, receivedDocuments));
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
// Polls every 100 ms until `count` documents arrived or the timeout budget is spent,
// then asserts on the received count. The previous assertion (remainingWork >= 0)
// could pass on a timeout when the budget expired at exactly 0 without any documents.
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    assertThat((long) receivedDocuments.size())
        .as("Failed to receive all the feed documents")
        .isGreaterThanOrEqualTo(count);
}
// NOTE(review): empty per-method hook — presumably kept so the timeout/alwaysRun settings
// apply; confirm whether it can be removed.
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
// Builds the shared async client and resolves the shared database once per class.
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
// NOTE(review): empty per-method cleanup hook — see beforeMethod().
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
// Releases the shared client after all tests in the class have run.
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Bulk-inserts {@code count} generated documents into {@code feedCollection}, appends them
 * to {@code createdDocuments}, and waits for replicas to catch up.
 * ({@code receivedDocuments} is unused here; kept to preserve existing call sites.)
 */
private void setupReadFeedDocuments(List<InternalObjectNode> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<InternalObjectNode> batch = new ArrayList<>();
    for (long created = 0; created < count; created++) {
        batch.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, batch));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Bulk-inserts {@code count} additional generated documents into {@code feedCollection},
 * appends them to {@code createdDocuments}, and waits for replicas to catch up.
 */
private void createReadFeedDocuments(List<InternalObjectNode> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<InternalObjectNode> batch = new ArrayList<>();
    for (long created = 0; created < count; created++) {
        batch.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, batch));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Generates a test document whose id and partition key ("mypk") share one fresh UUID.
 */
private InternalObjectNode getDocumentDefinition() {
    String uuid = UUID.randomUUID().toString();
    String json = String.format("{ "
        + "\"id\": \"%s\", "
        + "\"mypk\": \"%s\", "
        + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
        + "}"
        , uuid, uuid);
    return new InternalObjectNode(json);
}
// Creates the monitored (feed) container using the suite's default collection definition.
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
    return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
// Creates a lease container partitioned on "/id", as required by the default lease
// store manager (see ChangeFeedProcessorBuilderImpl.getLeaseStoreManager()).
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
    CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
        "leases_" + UUID.randomUUID(),
        "/id");
    return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
/**
 * Indexes a received change-feed document by its "id" property into the shared map.
 * Pretty-print logging is best effort; a serialization failure is logged but does not
 * prevent the document from being recorded. Synchronized: handlers run on multiple threads.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    String documentId = item.get("id").asText();
    try {
        ChangeFeedProcessorTest.log
            .info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(documentId, item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
private CosmosAsyncDatabase createdDatabase;
private final String hostName = RandomStringUtils.randomAlphabetic(6);
private final int FEED_COUNT = 10;
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<InternalObjectNode> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
changeFeedProcessor = new ChangeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.buildChangeFeedProcessor();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (InternalObjectNode item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<InternalObjectNode> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(20))
.setLeaseAcquireInterval(Duration.ofSeconds(10))
.setLeaseExpirationInterval(Duration.ofSeconds(30))
.setFeedPollDelay(Duration.ofSeconds(1))
.setLeasePrefix("TEST")
.setMaxItemCount(10)
.setStartTime(ZonedDateTime.now(ZoneOffset.UTC).minusDays(1).toInstant())
.setMinScaleCount(1)
.setMaxScaleCount(3)
)
.buildChangeFeedProcessor();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start in the expected time", ex);
throw ex;
}
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (InternalObjectNode item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void getEstimatedLag() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<InternalObjectNode> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.buildChangeFeedProcessor();
try {
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessor)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessor.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start and stopped in the expected time", ex);
throw ex;
}
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
Map<String, Integer> estimatedLagResult = changeFeedProcessor.getEstimatedLag()
.map(estimatedLag -> {
try {
log.info(OBJECT_MAPPER.writeValueAsString(estimatedLag));
} catch (JsonProcessingException ex) {
log.error("Unexpected", ex);
}
return estimatedLag;
}).block();
int totalLag = 0;
for (int lag : estimatedLagResult.values()) {
totalLag += lag;
}
assertThat(totalLag).isEqualTo(0).as("Change Feed Processor estimated total lag at start");
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
estimatedLagResult = changeFeedProcessor.getEstimatedLag()
.map(estimatedLag -> {
try {
log.info(OBJECT_MAPPER.writeValueAsString(estimatedLag));
} catch (JsonProcessingException ex) {
log.error("Unexpected", ex);
}
return estimatedLag;
}).block();
totalLag = 0;
for (int lag : estimatedLagResult.values()) {
totalLag += lag;
}
assertThat(totalLag).isEqualTo(FEED_COUNT).as("Change Feed Processor estimated total lag");
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
final String ownerFirst = "Owner_First";
final String ownerSecond = "Owner_Second";
final String leasePrefix = "TEST";
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessorFirst = new ChangeFeedProcessorBuilder()
.hostName(ownerFirst)
.handleChanges(docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix(leasePrefix)
)
.buildChangeFeedProcessor();
ChangeFeedProcessor changeFeedProcessorSecond = new ChangeFeedProcessorBuilder()
.hostName(ownerSecond)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeaseRenewInterval(Duration.ofSeconds(10))
.setLeaseAcquireInterval(Duration.ofSeconds(5))
.setLeaseExpirationInterval(Duration.ofSeconds(20))
.setFeedPollDelay(Duration.ofSeconds(2))
.setLeasePrefix(leasePrefix)
.setMaxItemCount(10)
.setStartFromBeginning(true)
.setMaxScaleCount(0)
)
.buildChangeFeedProcessor();
try {
changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessorFirst)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessorFirst.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.doOnSuccess(aVoid -> {
try {
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
SqlParameter param = new SqlParameter();
param.setName("@PartitionLeasePrefix");
param.setValue(leasePrefix);
SqlQuerySpec querySpec = new SqlQuerySpec(
"SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
createdLeaseCollection.queryItems(querySpec, cosmosQueryRequestOptions, InternalObjectNode.class).byPage()
.flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
.flatMap(doc -> {
ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
leaseDocument.setOwner("TEMP_OWNER");
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
.map(itemResponse -> BridgeInternal.getProperties(itemResponse));
})
.map(ServiceItemLease::fromDocument)
.map(leaseDocument -> {
ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
return leaseDocument;
})
.last()
.flatMap(leaseDocument -> {
ChangeFeedProcessorTest.log.info("Start creating documents");
List<InternalObjectNode> docDefList = new ArrayList<>();
for (int i = 0; i < FEED_COUNT; i++) {
docDefList.add(getDocumentDefinition());
}
return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
.last()
.delayElement(Duration.ofMillis(1000))
.flatMap(cosmosItemResponse -> {
ChangeFeedProcessorTest.log.info("Start second Change feed processor");
return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
});
})
.subscribe();
})
.subscribe();
} catch (Exception ex) {
log.error("First change feed processor did not start in the expected time", ex);
throw ex;
}
long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
remainingWork -= 100;
Thread.sleep(100);
}
waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsAfterSplit() throws InterruptedException {
CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<InternalObjectNode> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
changeFeedProcessor = new ChangeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges(changeFeedProcessorHandler(receivedDocuments))
.feedContainer(createdFeedCollectionForSplit)
.leaseContainer(createdLeaseCollection)
.options(new ChangeFeedProcessorOptions()
.setLeasePrefix("TEST")
.setStartFromBeginning(true)
.setMaxItemCount(10)
)
.buildChangeFeedProcessor();
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.onErrorResume(throwable -> {
log.error("Change feed processor did not start in the expected time", throwable);
return Mono.error(throwable);
})
.doOnSuccess(aVoid -> {
try {
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
})
.then(
createdFeedCollectionForSplit
.readThroughput().subscribeOn(Schedulers.elastic())
.flatMap(currentThroughput ->
createdFeedCollectionForSplit
.replaceThroughput(ThroughputProperties.createManualThroughput(FEED_COLLECTION_THROUGHPUT))
.subscribeOn(Schedulers.elastic())
)
.then()
)
.subscribe();
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
AsyncDocumentClient contextClient = getContextClient(createdDatabase);
Flux.just(1).subscribeOn(Schedulers.elastic())
.flatMap(value -> {
log.warn("Reading current hroughput change.");
return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
})
.map(partitionKeyRangeFeedResponse -> {
int count = partitionKeyRangeFeedResponse.getResults().size();
if (count < 2) {
log.warn("Throughput change is pending.");
throw new RuntimeException("Throughput change is not done.");
}
return count;
})
.retry(40, throwable -> {
try {
log.warn("Retrying...");
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted exception", e);
}
return true;
})
.last().block();
assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
for (InternalObjectNode item : createdDocuments) {
assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
}
Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
} finally {
safeDeleteCollection(createdFeedCollectionForSplit);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
}
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
return docs -> {
ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
};
}
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
long remainingWork = timeoutInMillisecond;
while (remainingWork > 0 && receivedDocuments.size() < count) {
remainingWork -= 100;
Thread.sleep(100);
}
assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
client = getClientBuilder().buildAsyncClient();
createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
private void setupReadFeedDocuments(List<InternalObjectNode> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
List<InternalObjectNode> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private void createReadFeedDocuments(List<InternalObjectNode> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
List<InternalObjectNode> docDefList = new ArrayList<>();
for(int i = 0; i < count; i++) {
docDefList.add(getDocumentDefinition());
}
createdDocuments.addAll(bulkInsertBlocking(feedCollection, docDefList));
waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
private InternalObjectNode getDocumentDefinition() {
String uuid = UUID.randomUUID().toString();
InternalObjectNode doc = new InternalObjectNode(String.format("{ "
+ "\"id\": \"%s\", "
+ "\"mypk\": \"%s\", "
+ "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
+ "}"
, uuid, uuid));
return doc;
}
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions();
return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, provisionedThroughput);
}
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
CosmosContainerRequestOptions options = new CosmosContainerRequestOptions();
CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(
"leases_" + UUID.randomUUID(),
"/id");
return createCollection(createdDatabase, collectionDefinition, options, provisionedThroughput);
}
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
try {
ChangeFeedProcessorTest.log
.info("RECEIVED {}", OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item));
} catch (JsonProcessingException e) {
log.error("Failure in processing json [{}]", e.getMessage(), e);
}
receivedDocuments.put(item.get("id").asText(), item);
}
} |
fixed | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = parsedSessionToken.split(SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(PROPERTY_NAME_TS).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | String[] segments = parsedSessionToken.split(SEGMENT_SEPARATOR); | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = parsedSessionToken.split(SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(PROPERTY_NAME_TS).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets and existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
 * Sets the {@link ChangeFeedObserverFactory} used to create {@link ChangeFeedObserver} instances.
 *
 * @param factory the factory to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code factory} is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory factory) {
    if (null == factory) {
        throw new IllegalArgumentException("observerFactory");
    }
    this.observerFactory = factory;
    return this;
}
/**
 * Registers an observer class; a factory wrapping the class is installed so a fresh
 * {@link ChangeFeedObserver} can be instantiated per partition.
 *
 * @param observerType the observer class to instantiate; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code observerType} is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> observerType) {
    if (null == observerType) {
        throw new IllegalArgumentException("type");
    }
    this.observerFactory = new ChangeFeedObserverFactoryImpl(observerType);
    return this;
}
// Convenience overload: installs a DefaultObserverFactory that forwards each batch
// of changed documents to the given consumer.
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param resourceId database resource ID (no validation performed).
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String resourceId) {
    this.databaseResourceId = resourceId;
    return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 *
 * @param resourceId collection resource ID (no validation performed).
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String resourceId) {
    this.collectionResourceId = resourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 * <p>
 * The client must have content-response-on-write enabled (lease bookkeeping reads the write
 * responses), and a consistency level of at least SESSION is expected.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseClient} is null or content response on write is disabled.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Fix: original message had broken grammar ("setting are less then expected").
        logger.warn("leaseClient consistency level setting is less than expected, which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} used to distribute leases across hosts.
 *
 * @param strategy the strategy to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code strategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy strategy) {
    if (null == strategy) {
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    this.loadBalancingStrategy = strategy;
    return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} used to create a {@link PartitionProcessor} per partition.
 *
 * @param factory the factory to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code factory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory factory) {
    if (null == factory) {
        throw new IllegalArgumentException("partitionProcessorFactory");
    }
    this.partitionProcessorFactory = factory;
    return this;
}
/**
 * Sets the {@link LeaseStoreManager} that owns lease documents; when provided, the
 * builder skips constructing its own manager.
 *
 * @param manager the manager to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code manager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager manager) {
    if (null == manager) {
        throw new IllegalArgumentException("leaseStoreManager");
    }
    this.leaseStoreManager = manager;
    return this;
}
/**
 * Sets the {@link HealthMonitor} notified of unhealthy partition-controller events.
 *
 * @param monitor the monitor to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code monitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor monitor) {
    if (null == monitor) {
        throw new IllegalArgumentException("healthMonitor");
    }
    this.healthMonitor = monitor;
    return this;
}
/**
 * Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration.
 * Validates that mandatory settings (host name, observer) are present and defaults the scheduler.
 *
 * @return an instance of {@link ChangeFeedProcessor}.
 * @throws IllegalArgumentException if host name or observer was not configured.
 */
public ChangeFeedProcessor build() {
    if (this.hostName == null) {
        throw new IllegalArgumentException("Host name was not specified");
    }
    if (this.observerFactory == null) {
        throw new IllegalArgumentException("Observer was not specified");
    }
    // Consistency with the later revision of this builder elsewhere in this file:
    // warn when leaseAcquireInterval is below the default, which causes lease churn.
    if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
        logger.warn("Found lower than expected setting for leaseAcquireInterval");
    }
    if (this.scheduler == null) {
        this.scheduler = Schedulers.elastic();
    }
    return this;
}
// Default constructor: seeds partition-query batch size and parallelism with their defaults.
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
// Testing/advanced constructor: uses a pre-built PartitionManager, bypassing the build pipeline.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
// Resolves databaseResourceId and collectionResourceId by reading the monitored
// database and container; also defaults changeFeedProcessorOptions when unset.
// Returns this builder (as ChangeFeedProcessor) once both IDs are populated.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
// Lazily builds and caches the LeaseStoreManager. The lease container must be
// partitioned on "/id"; otherwise the returned Mono errors.
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
// Only a single partition key path equal to "/id" is supported for leases.
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so subsequent calls return the same manager.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/**
 * Computes the lease-document prefix: "&lt;optionsPrefix&gt;&lt;endpointHost&gt;_&lt;dbRid&gt;_&lt;collRid&gt;".
 * A null options prefix is treated as empty.
 */
private String getLeasePrefix() {
    String prefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (prefix == null) {
        prefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return String.format("%s%s_%s_%s", prefix, serviceEndpoint.getHost(), this.databaseResourceId, this.collectionResourceId);
}
// Wires together the partition-processing pipeline: synchronizer, bootstrapper,
// supervisor factory, controller (+health-monitoring decorator) and load balancer,
// producing the PartitionManager that start() runs.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// A user-supplied processor factory wins over the default implementation.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the PartitionManager receives the UNDECORATED partitionController;
// the health-monitored decorator (partitionController2) is only seen by the load
// balancer. Confirm this asymmetry is intentional.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
// AutoCloseable: stops the processor asynchronously (fire-and-forget on the elastic scheduler).
// NOTE(review): stop() throws IllegalStateException when never started — close() would propagate it.
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
// First start: resolve collection IDs, build the lease store manager and the
// partition manager, then run it. Subsequent starts reuse the cached manager.
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
// NOTE(review): throws synchronously (not Mono.error) when the processor was
// never fully started — callers composing reactively should be aware.
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
 * Reports whether the processor is currently active.
 *
 * @return true if the partition manager exists and is running.
 */
@Override
public boolean isStarted() {
    // Snapshot the volatile field once so both checks see the same instance.
    PartitionManager manager = this.partitionManager;
    return manager != null && manager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
// Read at most one change past the lease's continuation so the current LSN
// and lag can be derived per partition.
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
// Session token carries the latest LSN; split off the pkrange prefix,
// then take the region segment when present.
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
// No results behind the continuation: fully caught up, lag 0.
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
// +1 because the first unprocessed document itself counts toward the lag.
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
// -1 signals "lag unknown" when LSNs are not numeric.
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
 * Sets the host name identifying this processor instance.
 *
 * @param name unique name for the host; must differ across hosts sharing a lease collection.
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl hostName(String name) {
    this.hostName = name;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
 *
 * @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code feedDocumentClient} is null.
 */
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
    if (feedDocumentClient == null) {
        // Fix: report the actual parameter name; the original message said "feedContextClient".
        throw new IllegalArgumentException("feedDocumentClient");
    }
    this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
    return this;
}
/**
 * Sets the {@link ChangeFeedProcessorOptions} to apply when the processor runs.
 *
 * @param processorOptions the options to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code processorOptions} is null.
 */
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions processorOptions) {
    if (null == processorOptions) {
        throw new IllegalArgumentException("changeFeedProcessorOptions");
    }
    this.changeFeedProcessorOptions = processorOptions;
    return this;
}
/**
 * Sets the {@link ChangeFeedObserverFactory} used to create {@link ChangeFeedObserver} instances.
 *
 * @param factory the factory to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code factory} is null.
 */
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory factory) {
    if (null == factory) {
        throw new IllegalArgumentException("observerFactory");
    }
    this.observerFactory = factory;
    return this;
}
/**
 * Registers an observer class; a factory wrapping the class is installed so a fresh
 * {@link ChangeFeedObserver} can be instantiated per partition.
 *
 * @param observerType the observer class to instantiate; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code observerType} is null.
 */
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> observerType) {
    if (null == observerType) {
        throw new IllegalArgumentException("type");
    }
    this.observerFactory = new ChangeFeedObserverFactoryImpl(observerType);
    return this;
}
// Convenience overload: installs a DefaultObserverFactory that forwards each batch
// of changed documents to the given consumer.
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
 * Sets the database resource ID of the monitored collection.
 *
 * @param resourceId database resource ID (no validation performed).
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String resourceId) {
    this.databaseResourceId = resourceId;
    return this;
}
/**
 * Sets the collection resource ID of the monitored collection.
 *
 * @param resourceId collection resource ID (no validation performed).
 * @return current Builder.
 */
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String resourceId) {
    this.collectionResourceId = resourceId;
    return this;
}
/**
 * Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
 * <p>
 * The client must have content-response-on-write enabled (lease bookkeeping reads the write
 * responses), and a consistency level of at least SESSION is expected.
 *
 * @param leaseClient the instance of {@link CosmosAsyncContainer} to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code leaseClient} is null or content response on write is disabled.
 */
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
    if (leaseClient == null) {
        throw new IllegalArgumentException("leaseClient");
    }
    if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
        throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
    }
    ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
    if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
        // Fix: original message had broken grammar ("setting are less then expected").
        logger.warn("leaseClient consistency level setting is less than expected, which is SESSION");
    }
    this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
    return this;
}
/**
 * Sets the {@link PartitionLoadBalancingStrategy} used to distribute leases across hosts.
 *
 * @param strategy the strategy to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code strategy} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy strategy) {
    if (null == strategy) {
        throw new IllegalArgumentException("loadBalancingStrategy");
    }
    this.loadBalancingStrategy = strategy;
    return this;
}
/**
 * Sets the {@link PartitionProcessorFactory} used to create a {@link PartitionProcessor} per partition.
 *
 * @param factory the factory to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code factory} is null.
 */
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory factory) {
    if (null == factory) {
        throw new IllegalArgumentException("partitionProcessorFactory");
    }
    this.partitionProcessorFactory = factory;
    return this;
}
/**
 * Sets the {@link LeaseStoreManager} that owns lease documents; when provided, the
 * builder skips constructing its own manager.
 *
 * @param manager the manager to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code manager} is null.
 */
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager manager) {
    if (null == manager) {
        throw new IllegalArgumentException("leaseStoreManager");
    }
    this.leaseStoreManager = manager;
    return this;
}
/**
 * Sets the {@link HealthMonitor} notified of unhealthy partition-controller events.
 *
 * @param monitor the monitor to use; must not be null.
 * @return current Builder.
 * @throws IllegalArgumentException if {@code monitor} is null.
 */
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor monitor) {
    if (null == monitor) {
        throw new IllegalArgumentException("healthMonitor");
    }
    this.healthMonitor = monitor;
    return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
// A lease acquire interval below the default can cause excessive lease churn; warn only.
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
// Default to the shared elastic scheduler when none was supplied.
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
// Default constructor: seeds partition-query batch size and parallelism with their defaults.
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
// Testing/advanced constructor: uses a pre-built PartitionManager, bypassing the build pipeline.
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
// Resolves databaseResourceId and collectionResourceId by reading the monitored
// database and container; also defaults changeFeedProcessorOptions when unset.
// Returns this builder (as ChangeFeedProcessor) once both IDs are populated.
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
// Lazily builds and caches the LeaseStoreManager. The lease container must be
// partitioned on "/id"; otherwise the returned Mono errors.
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
// Only a single partition key path equal to "/id" is supported for leases.
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
// Cache so subsequent calls return the same manager.
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
/**
 * Computes the lease-document prefix: "&lt;optionsPrefix&gt;&lt;endpointHost&gt;_&lt;dbRid&gt;_&lt;collRid&gt;".
 * A null options prefix is treated as empty.
 */
private String getLeasePrefix() {
    String prefix = this.changeFeedProcessorOptions.getLeasePrefix();
    if (prefix == null) {
        prefix = "";
    }
    URI serviceEndpoint = this.feedContextClient.getServiceEndpoint();
    return String.format("%s%s_%s_%s", prefix, serviceEndpoint.getHost(), this.databaseResourceId, this.collectionResourceId);
}
// Wires together the partition-processing pipeline: synchronizer, bootstrapper,
// supervisor factory, controller (+health-monitoring decorator) and load balancer,
// producing the PartitionManager that start() runs.
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
// A user-supplied processor factory wins over the default implementation.
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
// NOTE(review): the PartitionManager receives the UNDECORATED partitionController;
// the health-monitored decorator (partitionController2) is only seen by the load
// balancer. Confirm this asymmetry is intentional.
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
// AutoCloseable: stops the processor asynchronously (fire-and-forget on the elastic scheduler).
// NOTE(review): stop() throws IllegalStateException when never started — close() would propagate it.
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
// NOTE(review): the "fixed | " prefix below (and the trailing "| ..." on the last line)
// is extraction residue from a dataset seam, not Java source.
// Test: verifies getEstimatedLag/getCurrentState report zero lag before ingestion and
// FEED_COUNT lag after FEED_COUNT documents are inserted while the processor is stopped.
fixed | public void getCurrentState() throws InterruptedException {
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<InternalObjectNode> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.buildChangeFeedProcessor();
try {
// Start, let it run for a while, then stop — all asynchronously with timeouts.
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessor)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessor.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start and stopped in the expected time", ex);
throw ex;
}
// Wait for the start/stop sequence above to complete.
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
List<ChangeFeedProcessorState> cfpCurrentState = changeFeedProcessor.getCurrentState()
.map(state -> {
try {
log.info(OBJECT_MAPPER.writeValueAsString(state));
} catch (JsonProcessingException ex) {
log.error("Unexpected", ex);
}
return state;
}).block();
int totalLag = 0;
for (ChangeFeedProcessorState item : cfpCurrentState) {
totalLag += item.getEstimatedLag();
}
assertThat(totalLag == 0).as("Change Feed Processor estimated total lag at start").isTrue();
// Insert documents while stopped, then re-check the lag.
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
cfpCurrentState = changeFeedProcessor.getCurrentState()
.map(state -> {
try {
log.info(OBJECT_MAPPER.writeValueAsString(state));
} catch (JsonProcessingException ex) {
log.error("Unexpected", ex);
}
return state;
}).block();
totalLag = 0;
for (ChangeFeedProcessorState item : cfpCurrentState) {
totalLag += item.getEstimatedLag();
}
assertThat(totalLag == FEED_COUNT).as("Change Feed Processor estimated total lag").isTrue();
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | assertThat(totalLag == FEED_COUNT).as("Change Feed Processor estimated total lag").isTrue(); | public void getCurrentState() throws InterruptedException {
// NOTE(review): this is a second copy of the getCurrentState test; its method header
// is fused into the previous seam line and the final line carries dataset residue.
CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
try {
List<InternalObjectNode> createdDocuments = new ArrayList<>();
Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
.hostName(hostName)
.handleChanges((List<JsonNode> docs) -> {
ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
for (JsonNode item : docs) {
processItem(item, receivedDocuments);
}
ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
})
.feedContainer(createdFeedCollection)
.leaseContainer(createdLeaseCollection)
.buildChangeFeedProcessor();
try {
// Start, let it run for a while, then stop — all asynchronously with timeouts.
changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.then(Mono.just(changeFeedProcessor)
.delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
.flatMap(value -> changeFeedProcessor.stop()
.subscribeOn(Schedulers.elastic())
.timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
))
.subscribe();
} catch (Exception ex) {
log.error("Change feed processor did not start and stopped in the expected time", ex);
throw ex;
}
Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
List<ChangeFeedProcessorState> cfpCurrentState = changeFeedProcessor.getCurrentState()
.map(state -> {
try {
log.info(OBJECT_MAPPER.writeValueAsString(state));
} catch (JsonProcessingException ex) {
log.error("Unexpected", ex);
}
return state;
}).block();
int totalLag = 0;
for (ChangeFeedProcessorState item : cfpCurrentState) {
totalLag += item.getEstimatedLag();
}
// NOTE(review): in AssertJ, .as() placed AFTER the assertion method has no effect;
// the description should precede isEqualTo(): assertThat(x).as("...").isEqualTo(0).
assertThat(totalLag).isEqualTo(0).as("Change Feed Processor estimated total lag at start");
setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
cfpCurrentState = changeFeedProcessor.getCurrentState()
.map(state -> {
try {
log.info(OBJECT_MAPPER.writeValueAsString(state));
} catch (JsonProcessingException ex) {
log.error("Unexpected", ex);
}
return state;
}).block();
totalLag = 0;
for (ChangeFeedProcessorState item : cfpCurrentState) {
totalLag += item.getEstimatedLag();
}
// NOTE(review): same .as()-after-assertion issue as above — the description is dropped.
assertThat(totalLag).isEqualTo(FEED_COUNT).as("Change Feed Processor estimated total lag");
} finally {
safeDeleteCollection(createdFeedCollection);
safeDeleteCollection(createdLeaseCollection);
Thread.sleep(500);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
// Class-wide logger and a shared ObjectMapper used to render documents/state in log output.
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
// Shared database; initialized in before_ChangeFeedProcessorTest().
private CosmosAsyncDatabase createdDatabase;
// Random host name per test-class instance so parallel runs do not contend for the same leases.
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Number of documents inserted/expected by each test.
private final int FEED_COUNT = 10;
// Base timeout unit (ms) multiplied throughout for start/stop/wait windows.
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
// High RU/s for the feed container; smaller values for the split-source and lease containers.
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// Instantiated by TestNG via the clientBuilders data provider; each client
// configuration produces its own test-class instance.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Inserts documents BEFORE the processor starts, then starts a processor configured with
 * setStartFromBeginning(true) and verifies every pre-existing document is delivered to the handler.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Seed documents first so start-from-beginning has history to replay.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .buildChangeFeedProcessor();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Give the processor time to acquire leases and drain the feed.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        // Every seeded document must have been observed by the handler.
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Starts a processor with setStartTime(now - 1 day) and verifies that documents inserted
 * AFTER start (but within the start-time window) are delivered to the handler.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(1))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                // Start reading from one day in the past.
                .setStartTime(ZonedDateTime.now(ZoneOffset.UTC).minusDays(1).toInstant())
                .setMinScaleCount(1)
                .setMaxScaleCount(3)
            )
            .buildChangeFeedProcessor();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Documents are inserted after start here, unlike the start-from-beginning test.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Verifies {@link ChangeFeedProcessor#getEstimatedLag()}: after the processor has run and
 * stopped, the estimated total lag is 0; after inserting FEED_COUNT documents with the
 * processor stopped, the estimated total lag equals FEED_COUNT.
 *
 * <p>Changes vs. original: uses the class logger instead of {@code System.out.println}
 * (consistent with the rest of the suite), and uses
 * {@code assertThat(actual).as(desc).isEqualTo(expected)} for a useful failure message
 * instead of {@code assertThat(boolean).isTrue()}.</p>
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void getEstimatedLag() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .buildChangeFeedProcessor();
        try {
            // Start, run for a while, then stop; the estimator below works off the persisted leases.
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessor)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessor.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start and stopped in the expected time", ex);
            throw ex;
        }
        Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        Map<String, Integer> estimatedLagResult = changeFeedProcessor.getEstimatedLag()
            .map(estimatedLag -> {
                log.info("Estimated lag: {}", estimatedLag);
                return estimatedLag;
            }).block();
        int totalLag = 0;
        for (int lag : estimatedLagResult.values()) {
            totalLag += lag;
        }
        // Nothing pending right after the processor drained and stopped.
        assertThat(totalLag).as("Change Feed Processor estimated total lag at start").isEqualTo(0);
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        estimatedLagResult = changeFeedProcessor.getEstimatedLag()
            .map(estimatedLag -> {
                log.info("Estimated lag: {}", estimatedLag);
                return estimatedLag;
            }).block();
        totalLag = 0;
        for (int lag : estimatedLagResult.values()) {
            totalLag += lag;
        }
        // The stopped processor should report exactly the newly inserted documents as lag.
        assertThat(totalLag).as("Change Feed Processor estimated total lag").isEqualTo(FEED_COUNT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Verifies lease takeover: the first processor runs and stops, its leases are then marked
 * with a bogus owner ("TEMP_OWNER"), documents are inserted, and a second processor with
 * aggressive lease timings must acquire the stale leases and process the new documents.
 *
 * <p>Fix: the original carried a DUPLICATE {@code @Test} annotation, which does not compile
 * ({@code @Test} is not a repeatable annotation); exactly one annotation is kept.</p>
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // First processor only logs; it creates and initially owns the leases.
        ChangeFeedProcessor changeFeedProcessorFirst = new ChangeFeedProcessorBuilder()
            .hostName(ownerFirst)
            .handleChanges(docs -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix(leasePrefix)
            )
            .buildChangeFeedProcessor();
        // Second processor uses short lease intervals so it can steal the stale leases quickly.
        ChangeFeedProcessor changeFeedProcessorSecond = new ChangeFeedProcessorBuilder()
            .hostName(ownerSecond)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(10))
                .setLeaseAcquireInterval(Duration.ofSeconds(5))
                .setLeaseExpirationInterval(Duration.ofSeconds(20))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix(leasePrefix)
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .buildChangeFeedProcessor();
        try {
            changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessorFirst)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessorFirst.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .doOnSuccess(aVoid -> {
                    try {
                        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("Interrupted exception", e);
                    }
                    ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                    SqlParameter param = new SqlParameter();
                    param.setName("@PartitionLeasePrefix");
                    param.setValue(leasePrefix);
                    SqlQuerySpec querySpec = new SqlQuerySpec(
                        "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                    CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
                    // Mark every lease with a bogus owner so they look stale to the second processor.
                    createdLeaseCollection.queryItems(querySpec, cosmosQueryRequestOptions, InternalObjectNode.class).byPage()
                        .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                        .flatMap(doc -> {
                            ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
                            leaseDocument.setOwner("TEMP_OWNER");
                            CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                            return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                                .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                        })
                        .map(ServiceItemLease::fromDocument)
                        .map(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                            return leaseDocument;
                        })
                        .last()
                        .flatMap(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("Start creating documents");
                            List<InternalObjectNode> docDefList = new ArrayList<>();
                            for (int i = 0; i < FEED_COUNT; i++) {
                                docDefList.add(getDocumentDefinition());
                            }
                            return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                                .last()
                                .delayElement(Duration.ofMillis(1000))
                                .flatMap(cosmosItemResponse -> {
                                    ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                    return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                                });
                        })
                        .subscribe();
                })
                .subscribe();
        } catch (Exception ex) {
            log.error("First change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Wait until the first processor stopped and the second one took over (or timeout).
        long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
        while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
            remainingWork -= 100;
            Thread.sleep(100);
        }
        waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Verifies the processor keeps delivering documents across a partition split: triggers a
 * split by raising throughput, waits (with retries) until more than one partition key range
 * exists, inserts a second batch, and asserts all documents from both batches are received.
 *
 * <p>Fix: corrected the "hroughput" typo in the log message. NOTE(review): the feed
 * container is created via {@code createLeaseCollection} (partition key "/id") rather than
 * {@code createFeedCollection}; the documents carry an "id" value, so this works, but
 * confirm it is intentional.</p>
 */
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsAfterSplit() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
        changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollectionForSplit)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix("TEST")
                .setStartFromBeginning(true)
                .setMaxItemCount(10)
            )
            .buildChangeFeedProcessor();
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .onErrorResume(throwable -> {
                log.error("Change feed processor did not start in the expected time", throwable);
                return Mono.error(throwable);
            })
            .doOnSuccess(aVoid -> {
                try {
                    waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
                } catch (InterruptedException e) {
                    throw new RuntimeException("Interrupted exception", e);
                }
            })
            // Raising throughput past the single-partition limit triggers a partition split.
            .then(
                createdFeedCollectionForSplit
                    .readThroughput().subscribeOn(Schedulers.elastic())
                    .flatMap(currentThroughput ->
                        createdFeedCollectionForSplit
                            .replaceThroughput(ThroughputProperties.createManualThroughput(FEED_COLLECTION_THROUGHPUT))
                            .subscribeOn(Schedulers.elastic())
                    )
                    .then()
            )
            .subscribe();
        Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
        AsyncDocumentClient contextClient = getContextClient(createdDatabase);
        // Poll until the split completed (more than one partition key range exists).
        Flux.just(1).subscribeOn(Schedulers.elastic())
            .flatMap(value -> {
                log.warn("Reading current throughput change.");
                return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
            })
            .map(partitionKeyRangeFeedResponse -> {
                int count = partitionKeyRangeFeedResponse.getResults().size();
                if (count < 2) {
                    log.warn("Throughput change is pending.");
                    throw new RuntimeException("Throughput change is not done.");
                }
                return count;
            })
            .retry(40, throwable -> {
                try {
                    log.warn("Retrying...");
                    Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
                } catch (InterruptedException e) {
                    throw new RuntimeException("Interrupted exception", e);
                }
                return true;
            })
            .last().block();
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        // Second batch inserted after the split; both batches must arrive.
        createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollectionForSplit);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Builds the change-feed handler shared by the tests: every received document is
 * recorded into {@code receivedDocuments}, with entry/exit logging around the batch.
 */
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    return changes -> {
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", Thread.currentThread().getId());
        changes.forEach(change -> processItem(change, receivedDocuments));
        ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
    };
}
/**
 * Polls every 100 ms until {@code receivedDocuments} holds at least {@code count} entries
 * or {@code timeoutInMillisecond} elapses, then asserts on the received count.
 *
 * <p>Fix: the original asserted {@code remainingWork >= 0}, which passes falsely when the
 * countdown lands exactly on zero without all documents having arrived. Asserting on the
 * received size directly removes that false positive and gives a clearer failure message.</p>
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    assertThat((long) receivedDocuments.size())
        .as("Failed to receive all the feed documents")
        .isGreaterThanOrEqualTo(count);
}
// Per-test hook; intentionally empty — shared state is created once in before_ChangeFeedProcessorTest().
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
public void beforeMethod() {
}
// One-time setup: builds the async client from the factory-supplied builder and
// resolves the shared database used by every test in this class.
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
// Per-test teardown hook; intentionally empty — containers are cleaned up inside each test's finally block.
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterMethod() {
}
// One-time teardown: closes the async client created in before_ChangeFeedProcessorTest().
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    safeClose(client);
}
/**
 * Seeds {@code count} documents into the feed collection and records them in
 * {@code createdDocuments}. {@code receivedDocuments} is accepted for call-site symmetry
 * but is not touched here (it is filled later by the change-feed handler).
 *
 * <p>Delegates to {@link #createReadFeedDocuments} — the original body was a byte-for-byte
 * duplicate of that method.</p>
 */
private void setupReadFeedDocuments(List<InternalObjectNode> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    createReadFeedDocuments(createdDocuments, feedCollection, count);
}
/**
 * Bulk-inserts {@code count} freshly generated documents into {@code feedCollection},
 * appends them to {@code createdDocuments}, and waits for replicas to catch up.
 */
private void createReadFeedDocuments(List<InternalObjectNode> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<InternalObjectNode> definitions = new ArrayList<>();
    long remaining = count;
    while (remaining > 0) {
        definitions.add(getDocumentDefinition());
        remaining--;
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Builds a document with a random id, a "mypk" partition-key value equal to the id,
 * and a fixed "sgmts" payload.
 */
private InternalObjectNode getDocumentDefinition() {
    String id = UUID.randomUUID().toString();
    String json = String.format("{ "
        + "\"id\": \"%s\", "
        + "\"mypk\": \"%s\", "
        + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
        + "}"
        , id, id);
    return new InternalObjectNode(json);
}
/** Creates the monitored (feed) container from the shared collection definition. */
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    return createCollection(
        createdDatabase, getCollectionDefinition(), new CosmosContainerRequestOptions(), provisionedThroughput);
}
/** Creates a lease container with a unique "leases_"-prefixed name, partitioned by "/id". */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    CosmosContainerProperties leaseDefinition =
        new CosmosContainerProperties("leases_" + UUID.randomUUID(), "/id");
    return createCollection(createdDatabase, leaseDefinition, new CosmosContainerRequestOptions(), provisionedThroughput);
}
/**
 * Records a change-feed document into {@code receivedDocuments}, keyed by its "id" field,
 * after logging it pretty-printed. Synchronized so concurrent handler threads do not
 * interleave log output and map writes.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException e) {
        log.error("Failure in processing json [{}]", e.getMessage(), e);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} | class ChangeFeedProcessorTest extends TestSuiteBase {
// Class-wide logger and a shared ObjectMapper used to render documents/state in log output.
private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class);
private static final ObjectMapper OBJECT_MAPPER = Utils.getSimpleObjectMapper();
// Shared database; initialized in before_ChangeFeedProcessorTest().
private CosmosAsyncDatabase createdDatabase;
// Random host name per test-class instance so parallel runs do not contend for the same leases.
private final String hostName = RandomStringUtils.randomAlphabetic(6);
// Number of documents inserted/expected by each test.
private final int FEED_COUNT = 10;
// Base timeout unit (ms) multiplied throughout for start/stop/wait windows.
private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000;
// High RU/s for the feed container; smaller values for the split-source and lease containers.
private final int FEED_COLLECTION_THROUGHPUT = 10100;
private final int FEED_COLLECTION_THROUGHPUT_FOR_SPLIT = 400;
private final int LEASE_COLLECTION_THROUGHPUT = 400;
private CosmosAsyncClient client;
private ChangeFeedProcessor changeFeedProcessor;
// Instantiated by TestNG via the clientBuilders data provider; each client
// configuration produces its own test-class instance.
@Factory(dataProvider = "clientBuilders")
public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
/**
 * Inserts documents BEFORE the processor starts, then starts a processor configured with
 * setStartFromBeginning(true) and verifies every pre-existing document is delivered to the handler.
 */
@Test(groups = { "emulator" }, timeOut = 2 * TIMEOUT)
public void readFeedDocumentsStartFromBeginning() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Seed documents first so start-from-beginning has history to replay.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .buildChangeFeedProcessor();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Give the processor time to acquire leases and drain the feed.
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        // Every seeded document must have been observed by the handler.
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Starts a processor with setStartTime(now - 1 day) and verifies that documents inserted
 * AFTER start (but within the start-time window) are delivered to the handler.
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsStartFromCustomDate() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(20))
                .setLeaseAcquireInterval(Duration.ofSeconds(10))
                .setLeaseExpirationInterval(Duration.ofSeconds(30))
                .setFeedPollDelay(Duration.ofSeconds(1))
                .setLeasePrefix("TEST")
                .setMaxItemCount(10)
                // Start reading from one day in the past.
                .setStartTime(ZonedDateTime.now(ZoneOffset.UTC).minusDays(1).toInstant())
                .setMinScaleCount(1)
                .setMaxScaleCount(3)
            )
            .buildChangeFeedProcessor();
        try {
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Documents are inserted after start here, unlike the start-from-beginning test.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 40 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Verifies {@link ChangeFeedProcessor#getEstimatedLag()}: after the processor has run and
 * stopped, the estimated total lag is 0; after inserting FEED_COUNT documents with the
 * processor stopped, the estimated total lag equals FEED_COUNT.
 *
 * <p>Fix: in AssertJ, {@code as(...)} must be called BEFORE the terminal assertion —
 * {@code assertThat(x).isEqualTo(0).as("...")} silently discards the description. The
 * calls below are reordered to {@code assertThat(x).as("...").isEqualTo(...)}.</p>
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void getEstimatedLag() throws InterruptedException {
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        ChangeFeedProcessor changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId());
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId());
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .buildChangeFeedProcessor();
        try {
            // Start, run for a while, then stop; the estimator below works off the persisted leases.
            changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessor)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessor.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .subscribe();
        } catch (Exception ex) {
            log.error("Change feed processor did not start and stopped in the expected time", ex);
            throw ex;
        }
        Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        Map<String, Integer> estimatedLagResult = changeFeedProcessor.getEstimatedLag()
            .map(estimatedLag -> {
                try {
                    log.info(OBJECT_MAPPER.writeValueAsString(estimatedLag));
                } catch (JsonProcessingException ex) {
                    log.error("Unexpected", ex);
                }
                return estimatedLag;
            }).block();
        int totalLag = 0;
        for (int lag : estimatedLagResult.values()) {
            totalLag += lag;
        }
        // Nothing pending right after the processor drained and stopped.
        assertThat(totalLag).as("Change Feed Processor estimated total lag at start").isEqualTo(0);
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollection, FEED_COUNT);
        estimatedLagResult = changeFeedProcessor.getEstimatedLag()
            .map(estimatedLag -> {
                try {
                    log.info(OBJECT_MAPPER.writeValueAsString(estimatedLag));
                } catch (JsonProcessingException ex) {
                    log.error("Unexpected", ex);
                }
                return estimatedLag;
            }).block();
        totalLag = 0;
        for (int lag : estimatedLagResult.values()) {
            totalLag += lag;
        }
        // The stopped processor should report exactly the newly inserted documents as lag.
        assertThat(totalLag).as("Change Feed Processor estimated total lag").isEqualTo(FEED_COUNT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
/**
 * Verifies lease takeover: the first processor runs and stops, its leases are then marked
 * with a bogus owner ("TEMP_OWNER"), documents are inserted, and a second processor with
 * aggressive lease timings must acquire the stale leases and process the new documents.
 *
 * <p>Fix: the original carried a DUPLICATE {@code @Test} annotation, which does not compile
 * ({@code @Test} is not a repeatable annotation); exactly one annotation is kept.</p>
 */
@Test(groups = { "emulator" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void staledLeaseAcquiring() throws InterruptedException {
    final String ownerFirst = "Owner_First";
    final String ownerSecond = "Owner_Second";
    final String leasePrefix = "TEST";
    CosmosAsyncContainer createdFeedCollection = createFeedCollection(FEED_COLLECTION_THROUGHPUT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // First processor only logs; it creates and initially owns the leases.
        ChangeFeedProcessor changeFeedProcessorFirst = new ChangeFeedProcessorBuilder()
            .hostName(ownerFirst)
            .handleChanges(docs -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix(leasePrefix)
            )
            .buildChangeFeedProcessor();
        // Second processor uses short lease intervals so it can steal the stale leases quickly.
        ChangeFeedProcessor changeFeedProcessorSecond = new ChangeFeedProcessorBuilder()
            .hostName(ownerSecond)
            .handleChanges((List<JsonNode> docs) -> {
                ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
                for (JsonNode item : docs) {
                    processItem(item, receivedDocuments);
                }
                ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond);
            })
            .feedContainer(createdFeedCollection)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeaseRenewInterval(Duration.ofSeconds(10))
                .setLeaseAcquireInterval(Duration.ofSeconds(5))
                .setLeaseExpirationInterval(Duration.ofSeconds(20))
                .setFeedPollDelay(Duration.ofSeconds(2))
                .setLeasePrefix(leasePrefix)
                .setMaxItemCount(10)
                .setStartFromBeginning(true)
                .setMaxScaleCount(0)
            )
            .buildChangeFeedProcessor();
        try {
            changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic())
                .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                .then(Mono.just(changeFeedProcessorFirst)
                    .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    .flatMap(value -> changeFeedProcessorFirst.stop()
                        .subscribeOn(Schedulers.elastic())
                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
                    ))
                .doOnSuccess(aVoid -> {
                    try {
                        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("Interrupted exception", e);
                    }
                    ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first");
                    SqlParameter param = new SqlParameter();
                    param.setName("@PartitionLeasePrefix");
                    param.setValue(leasePrefix);
                    SqlQuerySpec querySpec = new SqlQuerySpec(
                        "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", Collections.singletonList(param));
                    CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions();
                    // Mark every lease with a bogus owner so they look stale to the second processor.
                    createdLeaseCollection.queryItems(querySpec, cosmosQueryRequestOptions, InternalObjectNode.class).byPage()
                        .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.getResults()))
                        .flatMap(doc -> {
                            ServiceItemLease leaseDocument = ServiceItemLease.fromDocument(doc);
                            leaseDocument.setOwner("TEMP_OWNER");
                            CosmosItemRequestOptions options = new CosmosItemRequestOptions();
                            return createdLeaseCollection.replaceItem(doc, doc.getId(), new PartitionKey(doc.getId()), options)
                                .map(itemResponse -> BridgeInternal.getProperties(itemResponse));
                        })
                        .map(ServiceItemLease::fromDocument)
                        .map(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner());
                            return leaseDocument;
                        })
                        .last()
                        .flatMap(leaseDocument -> {
                            ChangeFeedProcessorTest.log.info("Start creating documents");
                            List<InternalObjectNode> docDefList = new ArrayList<>();
                            for (int i = 0; i < FEED_COUNT; i++) {
                                docDefList.add(getDocumentDefinition());
                            }
                            return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT)
                                .last()
                                .delayElement(Duration.ofMillis(1000))
                                .flatMap(cosmosItemResponse -> {
                                    ChangeFeedProcessorTest.log.info("Start second Change feed processor");
                                    return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic())
                                        .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT));
                                });
                        })
                        .subscribe();
                })
                .subscribe();
        } catch (Exception ex) {
            log.error("First change feed processor did not start in the expected time", ex);
            throw ex;
        }
        // Wait until the first processor stopped and the second one took over (or timeout).
        long remainingWork = 10 * CHANGE_FEED_PROCESSOR_TIMEOUT;
        while (remainingWork > 0 && changeFeedProcessorFirst.isStarted() && !changeFeedProcessorSecond.isStarted()) {
            remainingWork -= 100;
            Thread.sleep(100);
        }
        waitToReceiveDocuments(receivedDocuments, 10 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
        assertThat(changeFeedProcessorSecond.isStarted()).as("Change Feed Processor instance is running").isTrue();
        changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollection);
        safeDeleteCollection(createdLeaseCollection);
        Thread.sleep(500);
    }
}
@Test(groups = { "simple" }, timeOut = 50 * CHANGE_FEED_PROCESSOR_TIMEOUT)
public void readFeedDocumentsAfterSplit() throws InterruptedException {
    // End-to-end check that the change feed processor keeps delivering documents
    // across a partition split of the monitored (feed) container:
    //   1. seed FEED_COUNT documents and start the processor;
    //   2. replace the container throughput to trigger a split;
    //   3. poll partition key ranges until the split completes (>= 2 ranges);
    //   4. insert FEED_COUNT more documents and expect 2 * FEED_COUNT received.
    // NOTE(review): the feed container is created via createLeaseCollection
    // (partition key "/id") rather than createFeedCollection — confirm intentional.
    CosmosAsyncContainer createdFeedCollectionForSplit = createLeaseCollection(FEED_COLLECTION_THROUGHPUT_FOR_SPLIT);
    CosmosAsyncContainer createdLeaseCollection = createLeaseCollection(LEASE_COLLECTION_THROUGHPUT);
    try {
        List<InternalObjectNode> createdDocuments = new ArrayList<>();
        Map<String, JsonNode> receivedDocuments = new ConcurrentHashMap<>();
        // Seed documents before the processor starts; setStartFromBeginning(true)
        // below ensures they are still observed.
        setupReadFeedDocuments(createdDocuments, receivedDocuments, createdFeedCollectionForSplit, FEED_COUNT);
        changeFeedProcessor = new ChangeFeedProcessorBuilder()
            .hostName(hostName)
            .handleChanges(changeFeedProcessorHandler(receivedDocuments))
            .feedContainer(createdFeedCollectionForSplit)
            .leaseContainer(createdLeaseCollection)
            .options(new ChangeFeedProcessorOptions()
                .setLeasePrefix("TEST")
                .setStartFromBeginning(true)
                .setMaxItemCount(10)
            )
            .buildChangeFeedProcessor();
        // Start the processor; once the seeded documents arrive, bump the
        // throughput to FEED_COLLECTION_THROUGHPUT to trigger the split.
        changeFeedProcessor.start().subscribeOn(Schedulers.elastic())
            .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT))
            .onErrorResume(throwable -> {
                log.error("Change feed processor did not start in the expected time", throwable);
                return Mono.error(throwable);
            })
            .doOnSuccess(aVoid -> {
                try {
                    waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT);
                } catch (InterruptedException e) {
                    throw new RuntimeException("Interrupted exception", e);
                }
            })
            .then(
                createdFeedCollectionForSplit
                    .readThroughput().subscribeOn(Schedulers.elastic())
                    .flatMap(currentThroughput ->
                        createdFeedCollectionForSplit
                            .replaceThroughput(ThroughputProperties.createManualThroughput(FEED_COLLECTION_THROUGHPUT))
                            .subscribeOn(Schedulers.elastic())
                    )
                    .then()
            )
            .subscribe();
        // Give the processor and the throughput change time to take effect.
        Thread.sleep(4 * CHANGE_FEED_PROCESSOR_TIMEOUT);
        // Poll the container's partition key ranges until the split has completed
        // (the container reports at least two ranges), retrying up to 40 times
        // with a fixed sleep between attempts.
        String partitionKeyRangesPath = extractContainerSelfLink(createdFeedCollectionForSplit);
        AsyncDocumentClient contextClient = getContextClient(createdDatabase);
        Flux.just(1).subscribeOn(Schedulers.elastic())
            .flatMap(value -> {
                // (fixed typo: "hroughput" -> "throughput")
                log.warn("Reading current throughput change.");
                return contextClient.readPartitionKeyRanges(partitionKeyRangesPath, null);
            })
            .map(partitionKeyRangeFeedResponse -> {
                int count = partitionKeyRangeFeedResponse.getResults().size();
                if (count < 2) {
                    log.warn("Throughput change is pending.");
                    throw new RuntimeException("Throughput change is not done.");
                }
                return count;
            })
            .retry(40, throwable -> {
                try {
                    log.warn("Retrying...");
                    Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
                } catch (InterruptedException e) {
                    throw new RuntimeException("Interrupted exception", e);
                }
                return true;
            })
            .last().block();
        assertThat(changeFeedProcessor.isStarted()).as("Change Feed Processor instance is running").isTrue();
        // Insert a second batch after the split and expect every document from
        // both batches to have been delivered to the handler.
        createReadFeedDocuments(createdDocuments, createdFeedCollectionForSplit, FEED_COUNT);
        waitToReceiveDocuments(receivedDocuments, 2 * CHANGE_FEED_PROCESSOR_TIMEOUT, FEED_COUNT * 2);
        changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe();
        for (InternalObjectNode item : createdDocuments) {
            assertThat(receivedDocuments.containsKey(item.getId())).as("Document with getId: " + item.getId()).isTrue();
        }
        // Allow the processor to finish shutting down before cleanup.
        Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT);
    } finally {
        safeDeleteCollection(createdFeedCollectionForSplit);
        safeDeleteCollection(createdLeaseCollection);
        // Allow collection deletes to settle (mirrors the other tests in this class).
        Thread.sleep(500);
    }
}
private Consumer<List<JsonNode>> changeFeedProcessorHandler(Map<String, JsonNode> receivedDocuments) {
    // Builds the change-feed callback: logs entry/exit per delivery batch and
    // records every received document into the shared map via processItem.
    return changes -> {
        long threadId = Thread.currentThread().getId();
        ChangeFeedProcessorTest.log.info("START processing from thread in test {}", threadId);
        changes.forEach(change -> processItem(change, receivedDocuments));
        ChangeFeedProcessorTest.log.info("END processing from thread {}", threadId);
    };
}
/**
 * Polls every 100 ms until {@code receivedDocuments} holds at least {@code count}
 * entries or {@code timeoutInMillisecond} elapses, then asserts all documents arrived.
 */
private void waitToReceiveDocuments(Map<String, JsonNode> receivedDocuments, long timeoutInMillisecond, long count) throws InterruptedException {
    long remainingWork = timeoutInMillisecond;
    while (remainingWork > 0 && receivedDocuments.size() < count) {
        remainingWork -= 100;
        Thread.sleep(100);
    }
    // Assert on the actual condition of interest rather than the leftover time
    // budget: the original check (remainingWork >= 0) passed spuriously when the
    // timeout was an exact multiple of 100 ms (budget drains exactly to 0) even
    // though no documents had been received.
    assertThat(receivedDocuments.size() >= count).as("Failed to receive all the feed documents").isTrue();
}
@BeforeMethod(groups = { "emulator", "simple" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true)
// Intentionally empty: kept as an explicit TestNG per-method lifecycle hook
// (tests in this class do their own setup inline).
public void beforeMethod() {
}
@BeforeClass(groups = { "emulator", "simple" }, timeOut = SETUP_TIMEOUT, alwaysRun = true)
public void before_ChangeFeedProcessorTest() {
    // One-time class setup: build the async client and grab the shared database
    // used by all tests; the client is closed in afterClass().
    client = getClientBuilder().buildAsyncClient();
    createdDatabase = getSharedCosmosDatabase(client);
}
@AfterMethod(groups = { "emulator", "simple" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true)
// Intentionally empty: per-test cleanup happens in each test's finally block;
// the hook keeps TestNG timeout/group bookkeeping uniform.
public void afterMethod() {
}
@AfterClass(groups = { "emulator", "simple" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    // Release the client created in before_ChangeFeedProcessorTest().
    safeClose(client);
}
/**
 * Seeds {@code count} documents into the feed collection before a test starts.
 * The body was a byte-identical duplicate of {@link #createReadFeedDocuments};
 * delegate to it so the bulk-insert logic exists in one place.
 * ({@code receivedDocuments} was unused by the original implementation as well;
 * the parameter is kept for caller compatibility.)
 */
private void setupReadFeedDocuments(List<InternalObjectNode> createdDocuments, Map<String, JsonNode> receivedDocuments, CosmosAsyncContainer feedCollection, long count) {
    createReadFeedDocuments(createdDocuments, feedCollection, count);
}
/**
 * Bulk-inserts {@code count} freshly generated documents into the feed
 * collection, records them in {@code createdDocuments}, and waits for replicas
 * to catch up so subsequent reads observe them.
 */
private void createReadFeedDocuments(List<InternalObjectNode> createdDocuments, CosmosAsyncContainer feedCollection, long count) {
    List<InternalObjectNode> definitions = new ArrayList<>();
    for (int created = 0; created < count; ++created) {
        definitions.add(getDocumentDefinition());
    }
    createdDocuments.addAll(bulkInsertBlocking(feedCollection, definitions));
    waitIfNeededForReplicasToCatchUp(getClientBuilder());
}
/**
 * Builds a test document whose id and partition key field ("mypk") share a
 * fresh UUID; "sgmts" is fixed payload data.
 */
private InternalObjectNode getDocumentDefinition() {
    String id = UUID.randomUUID().toString();
    String json = String.format(
        "{ \"id\": \"%s\", \"mypk\": \"%s\", \"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]}",
        id, id);
    return new InternalObjectNode(json);
}
/**
 * Provisions a feed container using the shared collection definition and
 * default request options at the given throughput.
 */
private CosmosAsyncContainer createFeedCollection(int provisionedThroughput) {
    return createCollection(
        createdDatabase,
        getCollectionDefinition(),
        new CosmosContainerRequestOptions(),
        provisionedThroughput);
}
/**
 * Provisions a lease container, uniquely named per run and partitioned on /id,
 * at the given throughput.
 */
private CosmosAsyncContainer createLeaseCollection(int provisionedThroughput) {
    String leaseContainerName = "leases_" + UUID.randomUUID();
    CosmosContainerProperties leaseContainerDefinition =
        new CosmosContainerProperties(leaseContainerName, "/id");
    return createCollection(
        createdDatabase,
        leaseContainerDefinition,
        new CosmosContainerRequestOptions(),
        provisionedThroughput);
}
/**
 * Logs a received change-feed item (pretty-printed) and indexes it by its
 * "id" field in the shared map. Synchronized so concurrent handler threads
 * serialize their logging and map updates.
 */
private static synchronized void processItem(JsonNode item, Map<String, JsonNode> receivedDocuments) {
    try {
        String pretty = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(item);
        ChangeFeedProcessorTest.log.info("RECEIVED {}", pretty);
    } catch (JsonProcessingException serializationError) {
        log.error("Failure in processing json [{}]", serializationError.getMessage(), serializationError);
    }
    receivedDocuments.put(item.get("id").asText(), item);
}
} |
// NOTE(review): the following lines are non-Java residue (dataset web-page text)
// accidentally appended after the class body; commented out so the file compiles.
// Subsets and Splits
// No community queries yet
// The top public SQL queries from the community will appear here once available.